Example 1
def apply_patch(patch_file, dest, fn=None, copy=False, level=None):
    """
    Apply a patch to source code in directory dest
    - assume unified diff created with "diff -ru old new"
    """

    if build_option('extended_dry_run'):
        # skip checking of files in dry run mode
        patch_filename = os.path.basename(patch_file)
        dry_run_msg("* applying patch file %s" % patch_filename, silent=build_option('silent'))

    elif not os.path.isfile(patch_file):
        raise EasyBuildError("Can't find patch %s: no such file", patch_file)

    elif fn and not os.path.isfile(fn):
        raise EasyBuildError("Can't patch file %s: no such file", fn)

    elif not os.path.isdir(dest):
        raise EasyBuildError("Can't patch directory %s: no such directory", dest)

    # copy missing files
    if copy:
        if build_option('extended_dry_run'):
            dry_run_msg("  %s copied to %s" % (patch_file, dest), silent=build_option('silent'))
        else:
            try:
                shutil.copy2(patch_file, dest)
                _log.debug("Copied patch %s to dir %s" % (patch_file, dest))
                # early exit, work is done after copying
                return True
            except IOError as err:
                raise EasyBuildError("Failed to copy %s to dir %s: %s", patch_file, dest, err)
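
A minimal standalone sketch of the recurring pattern in these examples: consult build_option for 'extended_dry_run' and exit early, only reporting what would be done. Here build_option is a hypothetical stub backed by a plain dict; in EasyBuild it queries the configured build options.

_BUILD_OPTIONS = {'extended_dry_run': True, 'silent': False}

def build_option(key, default=None):
    # hypothetical stand-in for EasyBuild's build_option()
    return _BUILD_OPTIONS.get(key, default)

def apply_patch_sketch(patch_file, dest):
    if build_option('extended_dry_run'):
        # dry run: only report, don't touch anything
        print("* applying patch file %s (in %s)" % (patch_file, dest))
        return True
    # real patching would happen here
    return True

apply_patch_sketch('foo.patch', '/tmp/build')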
Example 2
def update_pr(pr, paths, commit_msg=None):
    """Update specified pull request using specified files."""

    _log.experimental("Updating pull request #%s with %s", pr, paths)

    github_user = build_option('github_user')
    if github_user is None:
        raise EasyBuildError("GitHub user must be specified to use --update-pr")

    pr_target_account = build_option('pr_target_account')
    pr_target_repo = build_option('pr_target_repo')

    pr_url = lambda g: g.repos[pr_target_account][pr_target_repo].pulls[pr]
    status, pr_data = github_api_get_request(pr_url, github_user)
    if status != HTTP_STATUS_OK:
        raise EasyBuildError("Failed to get data for PR #%d from %s/%s (status: %d %s)",
                             pr, pr_target_account, pr_target_repo, status, pr_data)

    # branch that corresponds with PR is supplied in form <account>:<branch_label>
    account = pr_data['head']['label'].split(':')[0]
    branch = ':'.join(pr_data['head']['label'].split(':')[1:])
    github_target = '%s/%s' % (pr_target_account, pr_target_repo)
    print_msg("Determined branch name corresponding to %s PR #%s: %s" % (github_target, pr, branch), log=_log)

    _, _, _, diff_stat = _easyconfigs_pr_common(paths, start_branch=branch, pr_branch=branch,
                                                target_account=account, commit_msg=commit_msg)

    print_msg("Overview of changes:\n%s\n" % diff_stat, log=_log, prefix=False)

    full_repo = '%s/%s' % (pr_target_account, pr_target_repo)
    msg = "Updated %s PR #%s by pushing to branch %s/%s" % (full_repo, pr, account, branch)
    if build_option('dry_run') or build_option('extended_dry_run'):
        msg += " [DRY RUN]"
    print_msg(msg, log=_log, prefix=False)
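
A small sketch of the branch-label parsing above: the head label has the form <account>:<branch_label>, and any extra ':' characters must remain part of the branch name, hence the split/join; str.partition achieves the same in one pass.

label = 'someuser:feature:with-colon'
account = label.split(':')[0]
branch = ':'.join(label.split(':')[1:])
assert (account, branch) == ('someuser', 'feature:with-colon')
# equivalent, in a single pass:
account, _, branch = label.partition(':')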
Example 3
def apply_regex_substitutions(path, regex_subs):
    """
    Apply specified list of regex substitutions.

    @param path: path to file to patch
    @param regex_subs: list of substitutions to apply, specified as (<regexp pattern>, <replacement string>)
    """
    # only report when in 'dry run' mode
    if build_option('extended_dry_run'):
        dry_run_msg("applying regex substitutions to file %s" % path, silent=build_option('silent'))
        for regex, subtxt in regex_subs:
            dry_run_msg("  * regex pattern '%s', replacement string '%s'" % (regex, subtxt))

    else:
        _log.debug("Applying following regex substitutions to %s: %s", path, regex_subs)

        for i, (regex, subtxt) in enumerate(regex_subs):
            regex_subs[i] = (re.compile(regex), subtxt)

        try:
            for line in fileinput.input(path, inplace=1, backup='.orig.eb'):
                for regex, subtxt in regex_subs:
                    line = regex.sub(subtxt, line)
                sys.stdout.write(line)

        except OSError as err:
            raise EasyBuildError("Failed to patch %s: %s", path, err)
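
A self-contained sketch of the same in-place substitution loop, run against a throwaway temporary file; note that fileinput redirects sys.stdout into the file being edited.

import fileinput
import os
import re
import sys
import tempfile

fd, path = tempfile.mkstemp()
os.close(fd)
with open(path, 'w') as handle:
    handle.write('CC=cc\n')

regex_subs = [(re.compile(r'^CC=.*'), 'CC=gcc')]
for line in fileinput.input(path, inplace=1, backup='.orig'):
    for regex, subtxt in regex_subs:
        line = regex.sub(subtxt, line)
    sys.stdout.write(line)

print(open(path).read())  # prints: CC=gcc
Example 4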
    def load_module(self, mod_name, recursive_unload=False, depends_on=False, unload_modules=None):
        """
        Generate load statement for specified module.

        :param mod_name: name of module to generate load statement for
        :param recursive_unload: boolean indicating whether the 'load' statement should be reverted on unload
        :param depends_on: use a 'depends-on' statement rather than a 'load' statement (requires Lmod 7.6.1+)
        :param unload_modules: name(s) of modules to unload first
        """
        body = []
        if unload_modules:
            body.extend([self.unload_module(m).strip() for m in unload_modules])
        load_template = self.LOAD_TEMPLATE
        # Lmod 7.6.1+ supports depends-on which does this most nicely:
        if build_option('mod_depends_on') or depends_on:
            if not modules_tool().supports_depends_on:
                raise EasyBuildError("depends-on statements in generated module are not supported by modules tool")
            load_template = self.LOAD_TEMPLATE_DEPENDS_ON
        body.append(load_template)

        if build_option('recursive_mod_unload') or recursive_unload or load_template == self.LOAD_TEMPLATE_DEPENDS_ON:
            # not wrapping the 'module load' with an is-loaded guard ensures recursive unloading;
            # when "module unload" is called on the module in which the dependency "module load" is present,
            # it will get translated to "module unload"
            load_statement = body + ['']
        else:
            load_statement = [self.conditional_statement("is-loaded %(mod_name)s", '\n'.join(body), negative=True)]

        return '\n'.join([''] + load_statement) % {'mod_name': mod_name}
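
A simplified standalone sketch of the guard logic above, with Tcl-flavoured output as a stand-in for what the real module generators produce: without the is-loaded guard, unloading the parent module also unloads the dependency.

def load_statement_sketch(mod_name, recursive_unload=False):
    body = "module load %(mod_name)s"
    if recursive_unload:
        # no is-loaded guard, so recursive unloading works
        stmt = body
    else:
        stmt = "if { ![ is-loaded %(mod_name)s ] } {\n    " + body + "\n}"
    return stmt % {'mod_name': mod_name}

print(load_statement_sketch('GCC/4.9.2'))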
Example 5
def overall_test_report(ecs_with_res, orig_cnt, success, msg, init_session_state):
    """
    Upload/dump overall test report
    @param ecs_with_res: processed easyconfigs with build result (success/failure)
    @param orig_cnt: number of original easyconfig paths
    @param success: boolean indicating whether all builds were successful
    @param msg: message to be included in test report
    @param init_session_state: initial session state info to include in test report
    """
    dump_path = build_option('dump_test_report')
    pr_nr = build_option('from_pr')
    upload = build_option('upload_test_report')

    if upload:
        msg = msg + " (%d easyconfigs in this PR)" % orig_cnt
        test_report = create_test_report(msg, ecs_with_res, init_session_state, pr_nr=pr_nr, gist_log=True)
        if pr_nr:
            # upload test report to gist and issue a comment in the PR to notify
            txt = post_easyconfigs_pr_test_report(pr_nr, test_report, msg, init_session_state, success)
        else:
            # only upload test report as a gist
            gist_url = upload_test_report_as_gist(test_report)
            txt = "Test report uploaded to %s" % gist_url
    else:
        test_report = create_test_report(msg, ecs_with_res, init_session_state)
        txt = None
    _log.debug("Test report: %s" % test_report)

    if dump_path is not None:
        write_file(dump_path, test_report)
        _log.info("Test report dumped to %s" % dump_path)

    return txt
Example 6
    def init(self):
        """
        Initialise the GC3Pie job backend.
        """
        # List of config files for GC3Pie; non-existing ones will be
        # silently ignored.  The list here copies GC3Pie's default,
        # for the principle of minimal surprise, but there is no
        # strict requirement that this be done and EB could actually
        # choose to use a completely distinct set of conf. files.
        self.config_files = gc3libs.Default.CONFIG_FILE_LOCATIONS[:]
        cfgfile = build_option('job_backend_config')
        if cfgfile:
            self.config_files.append(cfgfile)

        self.output_dir = build_option('job_output_dir')
        self.job_cnt = 0

        job_deps_type = build_option('job_deps_type')
        if job_deps_type is None:
            job_deps_type = JOB_DEPS_TYPE_ABORT_ON_ERROR
            self.log.info("Using default job dependency type: %s", job_deps_type)
        else:
            self.log.info("Using specified job dependency type: %s", job_deps_type)

        if job_deps_type == JOB_DEPS_TYPE_ALWAYS_RUN:
            self.jobs = DependentTaskCollection(output_dir=self.output_dir)
        elif job_deps_type == JOB_DEPS_TYPE_ABORT_ON_ERROR:
            self.jobs = AbortingDependentTaskCollection(output_dir=self.output_dir)
        else:
            raise EasyBuildError("Unknown job dependency type specified: %s", job_deps_type)

        # after polling for job status, sleep for this time duration
        # before polling again (in seconds)
        self.poll_interval = build_option('job_polling_interval')
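
The if/elif dispatch above can also be expressed as a lookup table; a standalone sketch with a hypothetical placeholder class:

JOB_DEPS_TYPE_ALWAYS_RUN = 'always_run'
JOB_DEPS_TYPE_ABORT_ON_ERROR = 'abort_on_error'

class TaskCollectionStub(object):
    # hypothetical stand-in for the GC3Pie-backed task collections
    def __init__(self, output_dir=None):
        self.output_dir = output_dir

task_collections = {
    JOB_DEPS_TYPE_ALWAYS_RUN: TaskCollectionStub,
    JOB_DEPS_TYPE_ABORT_ON_ERROR: TaskCollectionStub,  # aborting variant in the real code
}

job_deps_type = JOB_DEPS_TYPE_ABORT_ON_ERROR
if job_deps_type not in task_collections:
    raise ValueError("Unknown job dependency type specified: %s" % job_deps_type)
jobs = task_collections[job_deps_type](output_dir='/tmp/out')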
Example 7
def det_easyconfig_paths(orig_paths):
    """
    Determine paths to easyconfig files.
    @param orig_paths: list of original easyconfig paths
    @return: list of paths to easyconfig files
    """
    from_pr = build_option('from_pr')
    robot_path = build_option('robot_path')

    # list of specified easyconfig files
    ec_files = orig_paths[:]

    if from_pr is not None:
        pr_files = fetch_easyconfigs_from_pr(from_pr)

        if ec_files:
            # replace paths for specified easyconfigs that are touched in PR
            for i, ec_file in enumerate(ec_files):
                for pr_file in pr_files:
                    if ec_file == os.path.basename(pr_file):
                        ec_files[i] = pr_file
        else:
            # if no easyconfigs are specified, use all the ones touched in the PR
            ec_files = [path for path in pr_files if path.endswith('.eb')]

    if ec_files and robot_path:
        # look for easyconfigs with relative paths in robot search path,
        # unless they were found at the given relative paths

        # determine which easyconfigs files need to be found, if any
        ecs_to_find = []
        for idx, ec_file in enumerate(ec_files):
            if ec_file == os.path.basename(ec_file) and not os.path.exists(ec_file):
                ecs_to_find.append((idx, ec_file))
        _log.debug("List of easyconfig files to find: %s" % ecs_to_find)

        # find missing easyconfigs by walking paths in robot search path
        for path in robot_path:
            _log.debug("Looking for missing easyconfig files (%d left) in %s..." % (len(ecs_to_find), path))
            for (subpath, dirnames, filenames) in os.walk(path, topdown=True):
                for idx, orig_path in ecs_to_find[:]:
                    if orig_path in filenames:
                        full_path = os.path.join(subpath, orig_path)
                        _log.info("Found %s in %s: %s" % (orig_path, path, full_path))
                        ec_files[idx] = full_path
                        # if file was found, stop looking for it (first hit wins)
                        ecs_to_find.remove((idx, orig_path))

                # stop os.walk insanity as soon as we have all we need (os.walk loop)
                if not ecs_to_find:
                    break

                # ignore subdirs specified to be ignored by replacing items in dirnames list used by os.walk
                dirnames[:] = [d for d in dirnames if d not in build_option('ignore_dirs')]

            # stop os.walk insanity as soon as we have all we need (outer loop)
            if not ecs_to_find:
                break

    return ec_files
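
A standalone sketch of the os.walk pattern used above: prune ignored subdirectories by reassigning dirnames in place (only effective with topdown=True), and break out as soon as nothing is left to find.

import os

def find_first(root, filename, ignore_dirs=('.git', '.svn')):
    for subpath, dirnames, filenames in os.walk(root, topdown=True):
        if filename in filenames:
            return os.path.join(subpath, filename)
        # modifying dirnames in place tells os.walk which subdirs to descend into
        dirnames[:] = [d for d in dirnames if d not in ignore_dirs]
    return None

print(find_first(os.curdir, 'README.md'))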
Example 8
def run_cmd_qa(cmd, qa, no_qa=None, log_ok=True, log_all=False, simple=False, regexp=True, std_qa=None, path=None, maxhits=50):
    """
    Run specified interactive command (in a subshell)
    @param cmd: command to run
    @param qa: dictionary which maps question to answers
    @param no_qa: list of patterns that are not questions
    @param log_ok: only log output/exit code for failing commands (exit code non-zero)
    @param log_all: always log command output and exit code
    @param simple: if True, just return True/False to indicate success, else return a tuple: (output, exit_code)
    @param regexp: regex used to check the output for errors; if True it will use the default (see parse_log_for_error)
    @param std_qa: dictionary which maps question regex patterns to answers
    @param path: path to execute the command in; current working directory is used if unspecified
    @param maxhits: maximum number of cycles (seconds) without being able to find a known question
    """
    cwd = os.getcwd()

    # early exit in 'dry run' mode, after printing the command that would be run
    if build_option('extended_dry_run'):
        if path is None:
            path = cwd
        dry_run_msg("  running interactive command \"%s\"" % cmd, silent=build_option('silent'))
        dry_run_msg("  (in %s)" % path, silent=build_option('silent'))
        if simple:
            return True
        else:
            # output, exit code
            return ('', 0)

    try:
        if path:
            os.chdir(path)

        _log.debug("run_cmd_qa: running cmd %s (in %s)" % (cmd, os.getcwd()))
    except OSError as err:
        _log.warning("Failed to change to %s: %s" % (path, err))
        _log.info("running cmd %s in non-existing directory, might fail!" % cmd)
Example 9
def parse_easyconfigs(paths, validate=True):
    """
    Parse easyconfig files
    :param paths: paths to easyconfigs
    """
    easyconfigs = []
    generated_ecs = False

    for (path, generated) in paths:
        path = os.path.abspath(path)
        # keep track of whether any files were generated
        generated_ecs |= generated
        if not os.path.exists(path):
            raise EasyBuildError("Can't find path %s", path)
        try:
            ec_files = find_easyconfigs(path, ignore_dirs=build_option('ignore_dirs'))
            for ec_file in ec_files:
                kwargs = {'validate': validate}
                # only pass build specs when not generating easyconfig files
                if not build_option('try_to_generate'):
                    kwargs['build_specs'] = build_option('build_specs')

                easyconfigs.extend(process_easyconfig(ec_file, **kwargs))

        except IOError as err:
            raise EasyBuildError("Processing easyconfigs in path %s failed: %s", path, err)
Example 10
    def test_strict(self):
        """Test use of --strict."""
        # check default
        self.assertEqual(build_option('strict'), run.WARN)

        for strict_str, strict_val in [('error', run.ERROR), ('ignore', run.IGNORE), ('warn', run.WARN)]:
            options = init_config(args=['--strict=%s' % strict_str])
            init_config(build_options={'strict': options.strict})
            self.assertEqual(build_option('strict'), strict_val)
Example 11
    def __init__(self, name=None, version=None, mns=None, class_constants=None, tcdeps=None, modtool=None,
                 hidden=False):
        """
        Toolchain constructor.

        :param name: toolchain name
        :param version: toolchain version
        :param mns: module naming scheme to use
        :param class_constants: toolchain 'constants' to define
        :param tcdeps: list of toolchain 'dependencies' (i.e., the toolchain components)
        :param modtool: ModulesTool instance to use
        :param hidden: bool indicating whether toolchain is hidden or not
        """
        self.base_init()

        self.dependencies = []
        self.toolchain_dep_mods = []
        self.cached_compilers = set()

        if name is None:
            name = self.NAME
        if name is None:
            raise EasyBuildError("Toolchain init: no name provided")
        self.name = name
        if version is None:
            version = self.VERSION
        if version is None:
            raise EasyBuildError("Toolchain init: no version provided")
        self.version = version

        self.modules = []
        self.vars = None

        self._init_class_constants(class_constants)

        self.tcdeps = tcdeps

        # toolchain instances are sometimes created before build options are initialized, e.g. for --list-toolchains
        self.dry_run = build_option('extended_dry_run', default=False)
        hidden_toolchains = build_option('hide_toolchains', default=None) or []
        self.hidden = hidden or (name in hidden_toolchains)

        self.modules_tool = modtool

        self.use_rpath = False

        self.mns = mns
        self.mod_full_name = None
        self.mod_short_name = None
        self.init_modpaths = None
        if self.name != DUMMY_TOOLCHAIN_NAME:
            # sometimes no module naming scheme class instance can/will be provided, e.g. with --list-toolchains
            if self.mns is not None:
                tc_dict = self.as_dict()
                self.mod_full_name = self.mns.det_full_module_name(tc_dict)
                self.mod_short_name = self.mns.det_short_module_name(tc_dict)
                self.init_modpaths = self.mns.det_init_modulepaths(tc_dict)
Example 12
    def complete(self):
        """
        Complete a bulk job submission.

        Create engine, and progress it until all jobs have terminated.
        """
        # create an instance of `Engine` using the list of configuration files
        try:
            self._engine = create_engine(*self.config_files, resource_errors_are_fatal=True)

        except gc3libs.exceptions.Error as err:
            raise EasyBuildError("Failed to create GC3Pie engine: %s", err)

        # make sure that all job log files end up in the same directory, rather than renaming the output directory
        # see https://gc3pie.readthedocs.org/en/latest/programmers/api/gc3libs/core.html#gc3libs.core.Engine
        self._engine.retrieve_overwrites = True

        # some sites may not be happy with flooding the cluster with build jobs...
        self._engine.max_in_flight = build_option('job_max_jobs')

        # `Engine.stats()` (which is used later on in `_print_status_report()`)
        # changed between 2.4.2 and 2.5.0.dev -- make sure we stay compatible
        # with both
        try:
            self._engine.init_stats_for(Application)
        except AttributeError:
            _log.debug("No `init_stats_for` method in the Engine class;"
                       " assuming pre-2.5.0 GC3Pie and ignoring error.")

        # Add your application to the engine. This will NOT submit
        # your application yet, but will make the engine *aware* of
        # the application.
        self._engine.add(self.jobs)

        # select a specific resource for the jobs, if requested
        target_resource = build_option('job_target_resource')
        if target_resource:
            res = self._engine.select_resource(target_resource)
            if res == 0:
                raise EasyBuildError("Failed to select target resource '%s' in GC3Pie", target_resource)

        # Periodically check the status of your application.
        while self.jobs.execution.state != Run.State.TERMINATED:
            # `Engine.progress()` will do the GC3Pie magic:
            # submit new jobs, update status of submitted jobs, get
            # results of terminating jobs etc...
            self._engine.progress()

            # report progress
            self._print_status_report()

            # Wait a few seconds...
            time.sleep(self.poll_interval)

        # final status report
        print_msg("Done processing jobs", log=self.log, silent=build_option('silent'))
        self._print_status_report()
Example 13
def run_cmd_qa(cmd, qa, no_qa=None, log_ok=True, log_all=False, simple=False, regexp=True, std_qa=None, path=None,
               maxhits=50, trace=True):
    """
    Run specified interactive command (in a subshell)
    :param cmd: command to run
    :param qa: dictionary which maps question to answers
    :param no_qa: list of patterns that are not questions
    :param log_ok: only log output/exit code for failing commands (exit code non-zero)
    :param log_all: always log command output and exit code
    :param simple: if True, just return True/False to indicate success, else return a tuple: (output, exit_code)
    :param regexp: regex used to check the output for errors; if True it will use the default (see parse_log_for_error)
    :param std_qa: dictionary which maps question regex patterns to answers
    :param path: path to execute the command in; current working directory is used if unspecified
    :param maxhits: maximum number of cycles (seconds) without being able to find a known question
    :param trace: print command being executed as part of trace output
    """
    cwd = os.getcwd()

    if log_all or (trace and build_option('trace')):
        # collect output of running command in temporary log file, if desired
        fd, cmd_log_fn = tempfile.mkstemp(suffix='.log', prefix='easybuild-run_cmd_qa-')
        os.close(fd)
        try:
            cmd_log = open(cmd_log_fn, 'w')
        except IOError as err:
            raise EasyBuildError("Failed to open temporary log file for output of interactive command: %s", err)
        _log.debug('run_cmd_qa: Output of "%s" will be logged to %s' % (cmd, cmd_log_fn))
    else:
        cmd_log_fn, cmd_log = None, None

    start_time = datetime.now()
    if trace:
        trace_txt = "running interactive command:\n"
        trace_txt += "\t[started at: %s]\n" % start_time.strftime('%Y-%m-%d %H:%M:%S')
        trace_txt += "\t[output logged in %s]\n" % cmd_log_fn
        trace_msg(trace_txt + '\t' + cmd.strip())

    # early exit in 'dry run' mode, after printing the command that would be run
    if build_option('extended_dry_run'):
        if path is None:
            path = cwd
        dry_run_msg("  running interactive command \"%s\"" % cmd, silent=build_option('silent'))
        dry_run_msg("  (in %s)" % path, silent=build_option('silent'))
        if simple:
            return True
        else:
            # output, exit code
            return ('', 0)

    try:
        if path:
            os.chdir(path)

        _log.debug("run_cmd_qa: running cmd %s (in %s)" % (cmd, os.getcwd()))
    except OSError as err:
        _log.warning("Failed to change to %s: %s" % (path, err))
        _log.info("running cmd %s in non-existing directory, might fail!" % cmd)
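
A standalone sketch of the core question/answer matching that run_cmd_qa performs on pending command output; the patterns and answers here are hypothetical.

import re

std_qa = {
    r'Do you accept the license\? \[yes/no\]\s*$': 'yes',
    r'Installation prefix:\s*$': '/tmp/prefix',
}
compiled_qa = [(re.compile(q), a) for (q, a) in std_qa.items()]

def answer(pending_output):
    for regex, ans in compiled_qa:
        if regex.search(pending_output):
            return ans
    return None  # no known question matched (yet); keep reading output

print(answer('Do you accept the license? [yes/no] '))  # yes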
Example 14
def dry_run(easyconfigs, modtool, short=False):
    """
    Compose dry run overview for supplied easyconfigs:
    * [ ] for unavailable
    * [x] for available
    * [F] for forced
    * [R] for rebuild
    :param easyconfigs: list of parsed easyconfigs (EasyConfig instances)
    :param modtool: ModulesTool instance to use
    :param short: use short format for overview: use a variable for common prefixes
    """
    lines = []
    if build_option('robot_path') is None:
        lines.append("Dry run: printing build status of easyconfigs")
        all_specs = easyconfigs
    else:
        lines.append("Dry run: printing build status of easyconfigs and dependencies")
        all_specs = resolve_dependencies(easyconfigs, modtool, retain_all_deps=True, raise_error_missing_ecs=False)

    unbuilt_specs = skip_available(all_specs, modtool)
    dry_run_fmt = " * [%1s] %s (module: %s)"  # markdown compatible (list of items with checkboxes in front)

    listed_ec_paths = [spec['spec'] for spec in easyconfigs]

    var_name = 'CFGS'
    common_prefix = det_common_path_prefix([spec['spec'] for spec in all_specs if spec['spec'] is not None])
    # only allow short if common prefix is long enough
    short = short and common_prefix is not None and len(common_prefix) > len(var_name) * 2
    for spec in all_specs:
        if spec in unbuilt_specs:
            ans = ' '
        elif build_option('force') and spec['spec'] in listed_ec_paths:
            ans = 'F'
        elif build_option('rebuild') and spec['spec'] in listed_ec_paths:
            ans = 'R'
        else:
            ans = 'x'

        if spec['ec'] is not None and spec['ec'].short_mod_name != spec['ec'].full_mod_name:
            mod = "%s | %s" % (spec['ec'].mod_subdir, spec['ec'].short_mod_name)
        else:
            mod = spec['full_mod_name']

        if spec['spec'] is None:
            item = "(no easyconfig file found)"
        elif short:
            item = os.path.join('$%s' % var_name, spec['spec'][len(common_prefix) + 1:])
        else:
            item = spec['spec']

        lines.append(dry_run_fmt % (ans, item, mod))

    if short:
        # insert after 'Dry run:' message
        lines.insert(1, "%s=%s" % (var_name, common_prefix))
    return '\n'.join(lines)
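
A standalone sketch of the $CFGS prefix shortening above; os.path.commonprefix (character-wise, trimmed back to a directory) is used as a rough stand-in for EasyBuild's det_common_path_prefix.

import os

specs = [
    '/home/user/easyconfigs/g/GCC/GCC-4.9.2.eb',
    '/home/user/easyconfigs/z/zlib/zlib-1.2.8-GCC-4.9.2.eb',
]
var_name = 'CFGS'
common_prefix = os.path.dirname(os.path.commonprefix(specs))
# only shorten if the common prefix is long enough to be worth it
short = len(common_prefix) > len(var_name) * 2

lines = ['%s=%s' % (var_name, common_prefix)] if short else []
for spec in specs:
    item = os.path.join('$' + var_name, spec[len(common_prefix) + 1:]) if short else spec
    lines.append(' * [x] %s' % item)
print('\n'.join(lines))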
Example 15
    def __init__(self, easyconfigs):
        self.container_base = build_option('container_base')
        self.container_build_image = build_option('container_build_image')
        self.container_path = container_path()
        self.easyconfigs = easyconfigs
        self.image_format = build_option('container_image_format')
        self.img_name = build_option('container_image_name')
        self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)
        self.mns = ActiveMNS()
        self.tmpdir = build_option('container_tmpdir')
Example 16
def search_easyconfigs(query, short=False):
    """Search for easyconfigs, if a query is provided."""
    robot_path = build_option("robot_path")
    if robot_path:
        search_path = robot_path
    else:
        search_path = [os.getcwd()]
    ignore_dirs = build_option("ignore_dirs")
    silent = build_option("silent")
    search_file(search_path, query, short=short, ignore_dirs=ignore_dirs, silent=silent)
Example 17
    def _set_compiler_flags(self):
        """Collect the flags set, and add them as variables too"""

        flags = [self.options.option(x) for x in self.COMPILER_FLAGS if self.options.get(x, False)]
        cflags = [self.options.option(x) for x in self.COMPILER_C_FLAGS + self.COMPILER_C_UNIQUE_FLAGS \
                  if self.options.get(x, False)]
        fflags = [self.options.option(x) for x in self.COMPILER_F_FLAGS + self.COMPILER_F_UNIQUE_FLAGS \
                  if self.options.get(x, False)]

        # Allow a user-defined default optimisation
        default_opt_level = build_option('default_opt_level')
        if default_opt_level not in self.COMPILER_OPT_FLAGS:
            raise EasyBuildError("Unknown value for default optimisation: %s (possibilities are %s)" %
                                 (default_opt_level, self.COMPILER_OPT_FLAGS))

        # the first flag is the one that will be used; the default is appended so the list is never empty
        optflags = ([self.options.option(x) for x in self.COMPILER_OPT_FLAGS if self.options.get(x, False)] + \
                    [self.options.option(default_opt_level)])[:1]

        # only apply if the vectorize toolchainopt is explicitly set
        # otherwise the individual compiler toolchain file should make sure that
        # vectorization is disabled for noopt and lowopt, and enabled otherwise.
        if self.options.get('vectorize') is not None:
            vectoptions = self.options.option('vectorize')
            vectflags = vectoptions[self.options['vectorize']]
            # avoid double use of such flags, or e.g. -fno-tree-vectorize followed by -ftree-vectorize
            if isinstance(optflags[0], list):
                optflags[0] = [flag for flag in optflags[0] if flag not in vectoptions.values()]
            optflags.append(vectflags)

        optarchflags = []
        if build_option('optarch') == OPTARCH_GENERIC:
            # don't take 'optarch' toolchain option into account when --optarch=GENERIC is used,
            # *always* include the flags that correspond to generic compilation (which are listed in 'optarch' option)
            optarchflags.append(self.options.option('optarch'))
        elif self.options.get('optarch', False):
            optarchflags.append(self.options.option('optarch'))

        precflags = [self.options.option(x) for x in self.COMPILER_PREC_FLAGS if self.options.get(x, False)] + \
                    [self.options.option('defaultprec')]

        self.variables.nextend('OPTFLAGS', optflags + optarchflags)
        self.variables.nextend('PRECFLAGS', precflags[:1])

        # precflags last
        for var in ['CFLAGS', 'CXXFLAGS']:
            self.variables.join(var, 'OPTFLAGS', 'PRECFLAGS')
            self.variables.nextend(var, flags)
            self.variables.nextend(var, cflags)

        for var in ['FCFLAGS', 'FFLAGS', 'F90FLAGS']:
            self.variables.join(var, 'OPTFLAGS', 'PRECFLAGS')
            self.variables.nextend(var, flags)
            self.variables.nextend(var, fflags)
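
A small sketch of the 'first one wins' selection for optimisation flags above: the default is appended so the [:1] slice is never empty; the flag values are hypothetical.

COMPILER_OPT_FLAGS = ['noopt', 'lowopt', 'opt']
opt_flag_map = {'noopt': '-O0', 'lowopt': '-O1', 'opt': '-O3'}  # hypothetical mapping
enabled = {'lowopt': True}
default_opt_level = 'opt'

optflags = ([opt_flag_map[x] for x in COMPILER_OPT_FLAGS if enabled.get(x, False)] +
            [opt_flag_map[default_opt_level]])[:1]
print(optflags)  # ['-O1']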
Example 18
def search_easyconfigs(query, short=False, filename_only=False, terse=False):
    """Search for easyconfigs, if a query is provided."""
    robot_path = build_option('robot_path')
    if robot_path:
        search_path = robot_path
    else:
        search_path = [os.getcwd()]
    ignore_dirs = build_option('ignore_dirs')
    silent = build_option('silent')
    search_file(search_path, query, short=short, ignore_dirs=ignore_dirs, silent=silent, filename_only=filename_only,
                terse=terse)
Example 19
def create_job(job_backend, build_command, easyconfig, output_dir='easybuild-build'):
    """
    Creates a job to build a *single* easyconfig.

    :param job_backend: A factory object for querying server parameters and creating actual job objects
    :param build_command: format string for command, full path to an easyconfig file will be substituted in it
    :param easyconfig: easyconfig as processed by process_easyconfig
    :param output_dir: optional output path; --regtest-output-dir will be set to this value inside the job

    returns the job
    """
    # capture PYTHONPATH, MODULEPATH and all variables starting with EASYBUILD
    easybuild_vars = {}
    for name in os.environ:
        if name.startswith("EASYBUILD"):
            easybuild_vars[name] = os.environ[name]

    for env_var in ["PYTHONPATH", "MODULEPATH"]:
        if env_var in os.environ:
            easybuild_vars[env_var] = os.environ[env_var]

    _log.info("Dictionary of environment variables passed to job: %s" % easybuild_vars)

    # obtain unique name based on name/easyconfig version tuple
    ec_tuple = (easyconfig['ec']['name'], det_full_ec_version(easyconfig['ec']))
    name = '-'.join(ec_tuple)

    # determine whether additional options need to be passed to the 'eb' command
    add_opts = ''
    if easyconfig['hidden']:
        add_opts += ' --hidden'

    # create command based on build_command template
    command = build_command % {
        'add_opts': add_opts,
        'output_dir': os.path.join(os.path.abspath(output_dir), name),
        'spec': easyconfig['spec'],
    }

    # just use latest build stats
    repo = init_repository(get_repository(), get_repositorypath())
    buildstats = repo.get_buildstats(*ec_tuple)
    extra = {}
    if buildstats:
        previous_time = buildstats[-1]['build_time']
        extra['hours'] = int(math.ceil(previous_time * 2 / 60))

    if build_option('job_cores'):
        extra['cores'] = build_option('job_cores')

    job = job_backend.make_job(command, name, easybuild_vars, **extra)
    job.module = easyconfig['ec'].full_mod_name

    return job
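
A standalone sketch of the environment capture above: every $EASYBUILD* variable plus a fixed whitelist is passed down to the job.

import os

easybuild_vars = {}
for name in os.environ:
    if name.startswith('EASYBUILD'):
        easybuild_vars[name] = os.environ[name]

for env_var in ('PYTHONPATH', 'MODULEPATH'):
    if env_var in os.environ:
        easybuild_vars[env_var] = os.environ[env_var]

print(sorted(easybuild_vars))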
Example 20
def dry_run(easyconfigs, modtool, short=False):
    """
    Compose dry run overview for supplied easyconfigs:
    * [ ] for unavailable
    * [x] for available
    * [F] for forced
    * [R] for rebuild
    @param easyconfigs: list of parsed easyconfigs (EasyConfig instances)
    @param modtool: ModulesTool instance to use
    @param short: use short format for overview: use a variable for common prefixes
    """
    lines = []
    if build_option("robot_path") is None:
        lines.append("Dry run: printing build status of easyconfigs")
        all_specs = easyconfigs
    else:
        lines.append("Dry run: printing build status of easyconfigs and dependencies")
        all_specs = resolve_dependencies(easyconfigs, modtool, retain_all_deps=True)

    unbuilt_specs = skip_available(all_specs, modtool)
    dry_run_fmt = " * [%1s] %s (module: %s)"  # markdown compatible (list of items with checkboxes in front)

    listed_ec_paths = [spec["spec"] for spec in easyconfigs]

    var_name = "CFGS"
    common_prefix = det_common_path_prefix([spec["spec"] for spec in all_specs])
    # only allow short if common prefix is long enough
    short = short and common_prefix is not None and len(common_prefix) > len(var_name) * 2
    for spec in all_specs:
        if spec in unbuilt_specs:
            ans = " "
        elif build_option("force") and spec["spec"] in listed_ec_paths:
            ans = "F"
        elif build_option("rebuild") and spec["spec"] in listed_ec_paths:
            ans = "R"
        else:
            ans = "x"

        if spec["ec"].short_mod_name != spec["ec"].full_mod_name:
            mod = "%s | %s" % (spec["ec"].mod_subdir, spec["ec"].short_mod_name)
        else:
            mod = spec["ec"].full_mod_name

        if short:
            item = os.path.join("$%s" % var_name, spec["spec"][len(common_prefix) + 1 :])
        else:
            item = spec["spec"]
        lines.append(dry_run_fmt % (ans, item, mod))

    if short:
        # insert after 'Dry run:' message
        lines.insert(1, "%s=%s" % (var_name, common_prefix))
    return "\n".join(lines)
Example 21
def search_easyconfigs(query, short=False, filename_only=False, terse=False):
    """Search for easyconfigs, if a query is provided."""
    search_path = build_option("robot_path")
    if not search_path:
        search_path = [os.getcwd()]

    ignore_dirs = build_option("ignore_dirs")

    # note: don't pass down 'filename_only' here, we need the full path to filter out archived easyconfigs
    var_defs, _hits = search_file(
        search_path, query, short=short, ignore_dirs=ignore_dirs, terse=terse, silent=True, filename_only=False
    )

    # filter out archived easyconfigs, these are handled separately
    hits, archived_hits = [], []
    for hit in _hits:
        if EASYCONFIGS_ARCHIVE_DIR in hit.split(os.path.sep):
            archived_hits.append(hit)
        else:
            hits.append(hit)

    # check whether only filenames should be printed
    if filename_only:
        hits = [os.path.basename(hit) for hit in hits]
        archived_hits = [os.path.basename(hit) for hit in archived_hits]

    # prepare output format
    if terse:
        lines, tmpl = [], "%s"
    else:
        lines = ["%s=%s" % var_def for var_def in var_defs]
        tmpl = " * %s"

    # non-archived hits are shown first
    lines.extend(tmpl % hit for hit in hits)

    # also take into account archived hits
    if archived_hits:
        if build_option("consider_archived_easyconfigs"):
            if not terse:
                lines.extend(["", "Matching archived easyconfigs:", ""])
            lines.extend(tmpl % hit for hit in archived_hits)
        elif not terse:
            cnt = len(archived_hits)
            lines.extend(
                [
                    "",
                    "Note: %d matching archived easyconfig(s) found, use --consider-archived-easyconfigs to see them"
                    % cnt,
                ]
            )

    print("\n".join(lines))
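
A standalone sketch of the archived-hit filtering above, checking for the archive directory as a full path component rather than a substring; '__archive__' is assumed as the archive directory name.

import os

EASYCONFIGS_ARCHIVE_DIR = '__archive__'  # assumed value
_hits = [
    '/repo/easyconfigs/g/GCC/GCC-4.9.2.eb',
    '/repo/easyconfigs/%s/f/foo/foo-1.0.eb' % EASYCONFIGS_ARCHIVE_DIR,
]

hits, archived_hits = [], []
for hit in _hits:
    if EASYCONFIGS_ARCHIVE_DIR in hit.split(os.path.sep):
        archived_hits.append(hit)
    else:
        hits.append(hit)

print(hits)           # non-archived hits, shown first
print(archived_hits)  # only shown with --consider-archived-easyconfigs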
Example 22
def build_singularity_image(def_path):
    """Build Singularity container image by calling out to 'singularity' (requires admin privileges!)."""

    cont_path = container_path()
    def_file = os.path.basename(def_path)

    # use --imagename if specified, otherwise derive based on filename of recipe
    img_name = build_option('container_image_name')
    if img_name is None:
        # definition file 'Singularity.<app>-<version>' leads to container image name '<app>-<version>.<img|simg>'
        img_name = def_file.split('.', 1)[1]

    cmd_opts = ''

    image_format = build_option('container_image_format')

    # squashfs image format (default for Singularity)
    if image_format in [None, CONT_IMAGE_FORMAT_SQUASHFS]:
        img_path = os.path.join(cont_path, img_name + '.simg')

    # ext3 image format, creating as writable container
    elif image_format == CONT_IMAGE_FORMAT_EXT3:
        img_path = os.path.join(cont_path, img_name + '.img')
        cmd_opts = '--writable'

    # sandbox image format, creates as a directory but acts like a container
    elif image_format == CONT_IMAGE_FORMAT_SANDBOX:
        img_path = os.path.join(cont_path, img_name)
        cmd_opts = '--sandbox'

    else:
        raise EasyBuildError("Unknown container image format specified for Singularity: %s" % image_format)

    if os.path.exists(img_path):
        if build_option('force'):
            print_msg("WARNING: overwriting existing container image at %s due to --force" % img_path)
            remove_file(img_path)
        else:
            raise EasyBuildError("Container image already exists at %s, not overwriting it without --force", img_path)

    # resolve full path to 'singularity' binary, since it may not be available via $PATH under sudo...
    singularity = which('singularity')
    cmd_env = ''

    singularity_tmpdir = build_option('container_tmpdir')
    if singularity_tmpdir:
        cmd_env += 'SINGULARITY_TMPDIR=%s' % singularity_tmpdir

    cmd = ' '.join(['sudo', cmd_env, singularity, 'build', cmd_opts, img_path, def_path])
    print_msg("Running '%s', you may need to enter your 'sudo' password..." % cmd)
    run_cmd(cmd, stream_output=True)
    print_msg("Singularity image created at %s" % img_path, log=_log)
Example 23
    def _set_fftw_variables(self):
        if not hasattr(self, 'BLAS_LIB_DIR'):
            raise EasyBuildError("_set_fftw_variables: IntelFFT based on IntelMKL (no BLAS_LIB_DIR found)")

        imklver = get_software_version(self.FFT_MODULE_NAME[0])

        picsuff = ''
        if self.options.get('pic', None):
            picsuff = '_pic'
        bitsuff = '_lp64'
        if self.options.get('i8', None):
            bitsuff = '_ilp64'
        compsuff = '_intel'
        if get_software_root('icc') is None:
            if get_software_root('GCC'):
                compsuff = '_gnu'
            else:
                raise EasyBuildError("Not using Intel compilers or GCC, don't know compiler suffix for FFTW libraries.")

        fftw_libs = ["fftw3xc%s%s" % (compsuff, picsuff)]
        if self.options['usempi']:
            # add cluster interface for recent imkl versions
            if LooseVersion(imklver) >= LooseVersion("11.0.2"):
                fftw_libs.append("fftw3x_cdft%s%s" % (bitsuff, picsuff))
            elif LooseVersion(imklver) >= LooseVersion("10.3"):
                fftw_libs.append("fftw3x_cdft%s" % picsuff)
            fftw_libs.append("mkl_cdft_core")  # add cluster dft
            fftw_libs.extend(self.variables['LIBBLACS'].flatten()) # add BLACS; use flatten because ListOfList

        self.log.debug('fftw_libs %s' % fftw_libs.__repr__())
        fftw_libs.extend(self.variables['LIBBLAS'].flatten())  # add BLAS libs (contains dft)
        self.log.debug('fftw_libs %s' % fftw_libs.__repr__())

        self.FFT_LIB_DIR = self.BLAS_LIB_DIR
        self.FFT_INCLUDE_DIR = self.BLAS_INCLUDE_DIR

        # building the FFTW interfaces is optional,
        # so make sure libraries are there before FFT_LIB is set
        imklroot = get_software_root(self.FFT_MODULE_NAME[0])
        fft_lib_dirs = [os.path.join(imklroot, d) for d in self.FFT_LIB_DIR]
        # filter out gfortran from list of FFTW libraries to check for, since it's not provided by imkl
        check_fftw_libs = [lib for lib in fftw_libs if lib != 'gfortran']
        fftw_lib_exists = lambda x: any([os.path.exists(os.path.join(d, "lib%s.a" % x)) for d in fft_lib_dirs])
        if all([fftw_lib_exists(lib) for lib in check_fftw_libs]):
            self.FFT_LIB = fftw_libs
        else:
            msg = "Not all FFTW interface libraries %s are found in %s" % (check_fftw_libs, fft_lib_dirs)
            msg += ", can't set $FFT_LIB."
            if build_option('extended_dry_run'):
                dry_run_warning(msg, silent=build_option('silent'))
            else:
                raise EasyBuildError(msg)
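
A standalone sketch of the static-library check above: every interface library must exist as lib<name>.a in at least one of the candidate directories before $FFT_LIB is set; the paths and library names are hypothetical.

import os

fft_lib_dirs = ['/opt/imkl/mkl/lib/intel64']  # hypothetical install layout
fftw_libs = ['fftw3xc_intel', 'mkl_cdft_core']

fftw_lib_exists = lambda x: any(os.path.exists(os.path.join(d, 'lib%s.a' % x)) for d in fft_lib_dirs)
print(all(fftw_lib_exists(lib) for lib in fftw_libs))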
Example 24
    def prepare(self, onlymod=None, silent=False, loadmod=True, rpath_filter_dirs=None, rpath_include_dirs=None):
        """
        Prepare a set of environment parameters based on name/version of toolchain
        - load modules for toolchain and dependencies
        - generate extra variables and set them in the environment

        :param onlymod: boolean/string to indicate if the toolchain should only load the environment
                         with module (True) or also set all other variables (False) like compiler CC etc
                         (If string: comma separated list of variables that will be ignored).
        :param silent: keep quiet, or not (mostly relates to extended dry run output)
        :param loadmod: whether or not to (re)load the toolchain module, and the modules for the dependencies
        :param rpath_filter_dirs: extra directories to include in RPATH filter (e.g. build dir, tmpdir, ...)
        :param rpath_include_dirs: extra directories to include in RPATH
        """
        if loadmod:
            self._load_modules(silent=silent)

        if self.name != DUMMY_TOOLCHAIN_NAME:

            trace_msg("defining build environment for %s/%s toolchain" % (self.name, self.version))

            if not self.dry_run:
                self._verify_toolchain()

            # Generate the variables to be set
            self.set_variables()

            # set the variables
            # onlymod can be comma-separated string of variables not to be set
            if onlymod is True:
                self.log.debug("prepare: do not set additional variables onlymod=%s", onlymod)
                self.generate_vars()
            else:
                self.log.debug("prepare: set additional variables onlymod=%s", onlymod)

                # add LDFLAGS and CPPFLAGS from dependencies to self.vars
                self._add_dependency_variables()
                self.generate_vars()
                self._setenv_variables(onlymod, verbose=not silent)

        # consider f90cache first, since ccache can also wrap Fortran compilers
        for cache_tool in [F90CACHE, CCACHE]:
            if build_option('use_%s' % cache_tool):
                self.prepare_compiler_cache(cache_tool)

        if build_option('rpath'):
            if self.options.get('rpath', True):
                self.prepare_rpath_wrappers(rpath_filter_dirs, rpath_include_dirs)
                self.use_rpath = True
            else:
                self.log.info("Not putting RPATH wrappers in place, disabled via 'rpath' toolchain option")
Example 25
def singularity(easyconfigs, container_base=None):
    """
    Create Singularity definition file and (optionally) image
    """
    check_singularity()

    if container_base is None:
        container_base = build_option('container_base')

    def_path = generate_singularity_recipe(easyconfigs, container_base)

    # also build container image, if requested (requires sudo!)
    if build_option('container_build_image'):
        build_singularity_image(def_path)
Example 26
def regtest(easyconfig_paths, build_specs=None):
    """
    Run regression test, using easyconfigs available in given path
    @param easyconfig_paths: path of easyconfigs to run regtest on
    @param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    """

    cur_dir = os.getcwd()

    aggregate_regtest = build_option('aggregate_regtest')
    if aggregate_regtest is not None:
        output_file = os.path.join(aggregate_regtest, "%s-aggregate.xml" % os.path.basename(aggregate_regtest))
        aggregate_xml_in_dirs(aggregate_regtest, output_file)
        _log.info("aggregated xml files inside %s, output written to: %s" % (aggregate_regtest, output_file))
        sys.exit(0)

    # create base directory, which is used to place
    # all log files and the test output as xml
    basename = "easybuild-test-%s" % datetime.now().strftime("%Y%m%d%H%M%S")
    var = config.OLDSTYLE_ENVIRONMENT_VARIABLES['test_output_path']

    regtest_output_dir = build_option('regtest_output_dir')
    if regtest_output_dir is not None:
        output_dir = regtest_output_dir
    elif var in os.environ:
        output_dir = os.path.abspath(os.environ[var])
    else:
        # default: current dir + easybuild-test-[timestamp]
        output_dir = os.path.join(cur_dir, basename)

    mkdir(output_dir, parents=True)

    # find all easyconfigs
    ecfiles = []
    if easyconfig_paths:
        for path in easyconfig_paths:
            ecfiles += find_easyconfigs(path, ignore_dirs=build_option('ignore_dirs'))
    else:
        _log.error("No easyconfig paths specified.")

    test_results = []

    # process all the found easyconfig files
    easyconfigs = []
    for ecfile in ecfiles:
        try:
            easyconfigs.extend(process_easyconfig(ecfile, build_specs=build_specs))
        except EasyBuildError as err:
            test_results.append((ecfile, 'parsing_easyconfigs', 'easyconfig file error: %s' % err, _log))
Example 27
def write_file(path, txt, append=False, forced=False):
    """Write given contents to file at given path (overwrites current file contents!)."""

    # early exit in 'dry run' mode
    if not forced and build_option('extended_dry_run'):
        dry_run_msg("file written: %s" % path, silent=build_option('silent'))
        return

    # note: we can't use try-except-finally, because Python 2.4 doesn't support it as a single block
    try:
        mkdir(os.path.dirname(path), parents=True)
        with open(path, 'a' if append else 'w') as handle:
            handle.write(txt)
    except IOError as err:
        raise EasyBuildError("Failed to write to %s: %s", path, err)
Example 28
def mkdir(path, parents=False, set_gid=None, sticky=None):
    """
    Create a directory at the given path.

    @param parents: create parent directories if needed (mkdir -p)
    @param set_gid: set group ID bit, to make subdirectories and files inherit group
    @param sticky: set the sticky bit on this directory (a.k.a. the restricted deletion flag),
                   to prevent users from removing/renaming files in this directory
    """
    if set_gid is None:
        set_gid = build_option('set_gid_bit')
    if sticky is None:
        sticky = build_option('sticky_bit')

    if not os.path.isabs(path):
        path = os.path.abspath(path)

    # nothing to do if path already exists
    if not os.path.exists(path):
        tup = (path, parents, set_gid, sticky)
        _log.info("Creating directory %s (parents: %s, set_gid: %s, sticky: %s)" % tup)
        # set_gid and sticky bits are only set on new directories, so we need to determine the existing parent path
        existing_parent_path = os.path.dirname(path)
        try:
            if parents:
                # climb up until we hit an existing path or the empty string (for relative paths)
                while existing_parent_path and not os.path.exists(existing_parent_path):
                    existing_parent_path = os.path.dirname(existing_parent_path)
                os.makedirs(path)
            else:
                os.mkdir(path)
        except OSError as err:
            _log.error("Failed to create directory %s: %s" % (path, err))

        # set group ID and sticky bits, if desired
        bits = 0
        if set_gid:
            bits |= stat.S_ISGID
        if sticky:
            bits |= stat.S_ISVTX
        if bits:
            try:
                new_subdir = path[len(existing_parent_path):].lstrip(os.path.sep)
                new_path = os.path.join(existing_parent_path, new_subdir.split(os.path.sep)[0])
                adjust_permissions(new_path, bits, add=True, relative=True, recursive=True, onlydirs=True)
            except OSError as err:
                _log.error("Failed to set group ID/sticky bit: %s" % err)
Example 29
    def define_env_var(self, env_var):
        """
        Determine whether environment variable with specified name should be defined or not.

        :param env_var: name of environment variable to check
        """
        return env_var not in (build_option('filter_env_vars') or [])
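
A tiny sketch of the '(option or [])' guard above: an unset option (None) is treated as an empty filter list.

def define_env_var_sketch(env_var, filter_env_vars=None):
    # None (option not set) behaves like an empty list
    return env_var not in (filter_env_vars or [])

print(define_env_var_sketch('CC'))                 # True
print(define_env_var_sketch('CC', ['CC', 'CXX']))  # False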
Example 30
    def __init__(self, mself, ext):
        """
        :param mself: parent ('master') EasyBlock instance, which provides the logger and configuration
        :param ext: dictionary with extension metadata; must include 'name'
        """
        self.master = mself
        self.log = self.master.log
        self.cfg = self.master.cfg.copy()
        self.ext = copy.deepcopy(ext)
        self.dry_run = self.master.dry_run

        if 'name' not in self.ext:
            raise EasyBuildError("'name' is missing in supplied class instance 'ext'.")

        # parent sanity check paths/commands are not relevant for extension
        self.cfg['sanity_check_commands'] = []
        self.cfg['sanity_check_paths'] = []

        # list of source/patch files: we use an empty list as default value like in EasyBlock
        self.src = self.ext.get('src', [])
        self.patches = self.ext.get('patches', [])
        self.options = copy.deepcopy(self.ext.get('options', {}))

        # don't re-prepare the build environment when doing a dry run, since it'll be the same as for the parent
        if not build_option('extended_dry_run'):
            self.toolchain.prepare(onlymod=self.cfg['onlytcmod'], silent=True)

        self.sanity_check_fail_msgs = []
Example 31
    def test_step(self):
        """
        Custom test procedure for Molpro.
        Run 'make quicktest' and 'make test', but only for a source install and when the license token is available.
        """

        # only bother to run the tests if the license token is available
        if os.path.isfile(self.license_token) and not self.cfg['precompiled_binaries']:

            # check 'main routes' only
            run_cmd("make quicktest")

            if build_option('mpi_tests'):
                # extensive test
                run_cmd("make MOLPRO_OPTIONS='-n%s' test" % self.cfg['parallel'])
            else:
                self.log.info("Skipping extensive testing of Molpro since MPI testing is disabled")
Example 32
    def check_module_path(self):
        """
        Check if MODULEPATH is set and change it if necessary.
        """
        # if self.mod_paths is not specified, define it and make sure the EasyBuild module path is in there (first)
        if self.mod_paths is None:
            # take (unique) module paths from environment
            self.set_mod_paths()
            self.log.debug("self.mod_paths set based on $MODULEPATH: %s" % self.mod_paths)

            # determine module path for EasyBuild install path to be included in $MODULEPATH
            eb_modpath = os.path.join(install_path(typ='modules'), build_option('suffix_modules_path'))

            # make sure EasyBuild module path is in 1st place
            self.prepend_module_path(eb_modpath)
            self.log.info("Prepended list of module paths with path used by EasyBuild: %s" % eb_modpath)

        # set the module path environment accordingly
        for mod_path in self.mod_paths[::-1]:
            self.use(mod_path)
        self.log.info("$MODULEPATH set based on list of module paths (via 'module use'): %s" % os.environ['MODULEPATH'])
Example 33
    def test_step(self):
        """Run GAMESS-US tests (if 'runtest' easyconfig parameter is set to True)."""
        # don't use provided 'runall' script for tests, since that only runs the tests single-core
        if self.cfg['runtest']:

            if not build_option('mpi_tests'):
                self.log.info("Skipping testing of GAMESS-US since MPI testing is disabled")
                return

            try:
                cwd = os.getcwd()
                os.chdir(self.testdir)
            except OSError as err:
                raise EasyBuildError("Failed to move to temporary directory for running tests: %s", err)

            # copy input files for exam<id> standard tests
            for test_input in glob.glob(os.path.join(self.installdir, 'tests', 'standard', 'exam*.inp')):
                try:
                    shutil.copy2(test_input, os.getcwd())
                except OSError as err:
                    raise EasyBuildError("Failed to copy %s to %s: %s", test_input, os.getcwd(), err)
Example 34
    def prepare(self):
        """
        Creates the absolute filename for the module.
        """
        mod_path_suffix = build_option('suffix_modules_path')
        full_mod_name = self.app.full_mod_name
        # module file goes in general moduleclass category
        self.filename = os.path.join(self.module_path, mod_path_suffix, full_mod_name)
        # make symlink in moduleclass category
        mod_symlink_paths = ActiveMNS().det_module_symlink_paths(self.app.cfg)
        self.class_mod_files = [os.path.join(self.module_path, p, full_mod_name) for p in mod_symlink_paths]

        # create directories and links
        for path in [os.path.dirname(x) for x in [self.filename] + self.class_mod_files]:
            mkdir(path, parents=True)

        # remove module file if it's there (it'll be recreated), see Application.make_module
        if os.path.exists(self.filename):
            os.remove(self.filename)

        return os.path.join(self.module_path, mod_path_suffix)
Example 35
    def _load_toolchain_module(self, silent=False):
        """Load toolchain module."""

        tc_mod = self.det_short_module_name()

        if self.dry_run:
            dry_run_msg("Loading toolchain module...\n", silent=silent)

            # load toolchain module, or simulate load of toolchain components if it is not available
            if self.modules_tool.exist([tc_mod], skip_avail=True)[0]:
                self.modules_tool.load([tc_mod])
                dry_run_msg("module load %s" % tc_mod, silent=silent)
            else:
                # first simulate loads for toolchain dependencies, if required information is available
                if self.tcdeps is not None:
                    for tcdep in self.tcdeps:
                        modname = tcdep['short_mod_name']
                        dry_run_msg("module load %s [SIMULATED]" % modname, silent=silent)
                        # use '$EBROOTNAME' as value for the dep install prefix (looks nice in dry run output)
                        deproot = '$%s' % get_software_root_env_var_name(tcdep['name'])
                        self._simulated_load_dependency_module(tcdep['name'], tcdep['version'], {'prefix': deproot})

                dry_run_msg("module load %s [SIMULATED]" % tc_mod, silent=silent)
                # use name of $EBROOT* env var as value for $EBROOT* env var (results in sensible dry run output)
                tcroot = '$%s' % get_software_root_env_var_name(self.name)
                self._simulated_load_dependency_module(self.name, self.version, {'prefix': tcroot})
        else:
            # make sure toolchain is available using short module name by running 'module use' on module path subdir
            if self.init_modpaths:
                mod_path_suffix = build_option('suffix_modules_path')
                for modpath in self.init_modpaths:
                    self.modules_tool.prepend_module_path(os.path.join(install_path('mod'), mod_path_suffix, modpath))

            # load modules for all dependencies
            self.log.debug("Loading module for toolchain: %s", tc_mod)
            trace_msg("loading toolchain module: " + tc_mod)
            self.modules_tool.load([tc_mod])

        # append toolchain module to list of modules
        self.modules.append(tc_mod)
Example 36
    def test_step(self):
        """Custom built-in test procedure for HPCG."""
        if self.cfg['runtest']:

            if not build_option('mpi_tests'):
                self.log.info("Skipping testing of HPCG since MPI testing is disabled")
                return

            objbindir = os.path.join(self.cfg['start_dir'], 'obj', 'bin')
            # obtain equivalent of 'mpirun -np 2 xhpcg'
            hpcg_mpi_cmd = self.toolchain.mpi_cmd_for("xhpcg", 2)
            # 2 threads per MPI process (4 threads in total)
            cmd = "PATH=%s:$PATH OMP_NUM_THREADS=2 %s" % (objbindir, hpcg_mpi_cmd)
            run_cmd(cmd, simple=True, log_all=True, log_ok=True)

            # find log file, check for success
            success_regex = re.compile(r"Scaled Residual \[[0-9.e-]+\]")
            try:
                hpcg_logs = glob.glob('hpcg_log*txt')
                if len(hpcg_logs) == 1:
                    txt = open(hpcg_logs[0], 'r').read()
                    self.log.debug("Contents of HPCG log file %s: %s" % (hpcg_logs[0], txt))
                    if success_regex.search(txt):
                        self.log.info("Found pattern '%s' in HPCG log file %s, OK!",
                                      success_regex.pattern, hpcg_logs[0])
                    else:
                        raise EasyBuildError("Failed to find pattern '%s' in HPCG log file %s",
                                             success_regex.pattern, hpcg_logs[0])
                else:
                    raise EasyBuildError("Failed to find exactly one HPCG log file: %s", hpcg_logs)
            except OSError as err:
                raise EasyBuildError("Failed to check for success in HPCG log file: %s", err)
Esempio n. 37
0
    def build_step(self, *args, **kwargs):
        """Custom build procedure for Python, ensure stack size limit is set to 'unlimited' (if desired)."""

        # make sure installation directory doesn't already exist when building with --rpath and
        # configuring with --enable-optimizations, since that leads to errors like:
        #   ./python: symbol lookup error: ./python: undefined symbol: __gcov_indirect_call
        # see also https://bugs.python.org/issue29712
        enable_opts_flag = '--enable-optimizations'
        if build_option('rpath') and enable_opts_flag in self.cfg['configopts']:
            if os.path.exists(self.installdir):
                warning_msg = "Removing existing installation directory '%s', "
                warning_msg += "because EasyBuild is configured to use RPATH linking "
                warning_msg += "and %s configure option is used." % enable_opts_flag
                print_warning(warning_msg % self.installdir)
                remove_dir(self.installdir)

        if self.cfg['ulimit_unlimited']:
            # determine current stack size limit
            (out, _) = run_cmd("ulimit -s")
            curr_ulimit_s = out.strip()

            # figure out hard limit for stack size limit;
            # this determines whether or not we can use "ulimit -s unlimited"
            (out, _) = run_cmd("ulimit -s -H")
            max_ulimit_s = out.strip()

            if curr_ulimit_s == UNLIMITED:
                self.log.info("Current stack size limit is %s: OK", curr_ulimit_s)
            elif max_ulimit_s == UNLIMITED:
                self.log.info("Current stack size limit is %s, setting it to %s for build...",
                              curr_ulimit_s, UNLIMITED)
                self.cfg.update('prebuildopts', "ulimit -s %s && " % UNLIMITED)
            else:
                msg = "Current stack size limit is %s, and can not be set to %s due to hard limit of %s;"
                msg += " setting stack size limit to %s instead, "
                msg += " this may break part of the compilation (e.g. hashlib)..."
                print_warning(msg % (curr_ulimit_s, UNLIMITED, max_ulimit_s, max_ulimit_s))
                self.cfg.update('prebuildopts', "ulimit -s %s && " % max_ulimit_s)

        super(EB_Python, self).build_step(*args, **kwargs)
Esempio n. 38
0
    def build_step(self):
        """Build and install WRF and testcases using provided compile script."""

        # enable parallel build
        par = self.cfg['parallel']
        self.par = ''
        if par:
            self.par = "-j %s" % par

        # fix compile script shebang to use provided tcsh
        cmpscript = os.path.join(self.start_dir, 'compile')
        tcsh_root = get_software_root('tcsh')
        if tcsh_root:
            tcsh_path = os.path.join(tcsh_root, 'bin', 'tcsh')
            # avoid using full path to tcsh if possible, since it may be too long to be used as shebang line
            which_tcsh = which('tcsh')
            if which_tcsh and os.path.samefile(which_tcsh, tcsh_path):
                env_path = os.path.join('/usr', 'bin', 'env')
                # use env command from alternate sysroot, if available
                sysroot = build_option('sysroot')
                if sysroot:
                    sysroot_env_path = os.path.join(sysroot, 'usr', 'bin',
                                                    'env')
                    if os.path.exists(sysroot_env_path):
                        env_path = sysroot_env_path
                new_shebang = env_path + ' tcsh'
            else:
                new_shebang = tcsh_path

            regex_subs = [('^#!/bin/csh.*', '#!' + new_shebang)]
            apply_regex_substitutions(cmpscript, regex_subs)

        # build wrf
        cmd = "%s %s wrf" % (cmpscript, self.par)
        run_cmd(cmd, log_all=True, simple=True, log_output=True)

        # build two testcases to produce ideal.exe and real.exe
        for test in ["em_real", "em_b_wave"]:
            cmd = "%s %s %s" % (cmpscript, self.par, test)
            run_cmd(cmd, log_all=True, simple=True, log_output=True)
Esempio n. 39
0
    def _set_compiler_flags(self):
        """Collect the flags set, and add them as variables too"""

        flags = [
            self.options.option(x) for x in self.COMPILER_FLAGS
            if self.options.get(x, False)
        ]
        cflags = [self.options.option(x) for x in self.COMPILER_C_FLAGS + self.COMPILER_C_UNIQUE_FLAGS \
                  if self.options.get(x, False)]
        fflags = [self.options.option(x) for x in self.COMPILER_F_FLAGS + self.COMPILER_F_UNIQUE_FLAGS \
                  if self.options.get(x, False)]

        # 1st one is the one to use. add default at the end so len is at least 1
        optflags = [self.options.option(x) for x in self.COMPILER_OPT_FLAGS if self.options.get(x, False)] + \
                   [self.options.option('defaultopt')]

        optarchflags = []
        if build_option('optarch') == OPTARCH_GENERIC:
            # don't take 'optarch' toolchain option into account when --optarch=GENERIC is used,
            # *always* include the flags that correspond to generic compilation (which are listed in 'optarch' option)
            optarchflags.append(self.options.option('optarch'))
        elif self.options.get('optarch', False):
            optarchflags.append(self.options.option('optarch'))

        precflags = [self.options.option(x) for x in self.COMPILER_PREC_FLAGS if self.options.get(x, False)] + \
                    [self.options.option('defaultprec')]

        self.variables.nextend('OPTFLAGS', optflags[:1] + optarchflags)
        self.variables.nextend('PRECFLAGS', precflags[:1])

        # precflags last
        for var in ['CFLAGS', 'CXXFLAGS']:
            self.variables.nappend(var, flags)
            self.variables.nappend(var, cflags)
            self.variables.join(var, 'OPTFLAGS', 'PRECFLAGS')

        for var in ['FCFLAGS', 'FFLAGS', 'F90FLAGS']:
            self.variables.nappend(var, flags)
            self.variables.nappend(var, fflags)
            self.variables.join(var, 'OPTFLAGS', 'PRECFLAGS')
Esempio n. 40
0
def github_api_get_request(request_f, github_user=None, token=None, **kwargs):
    """
    Helper method, for performing get requests to GitHub API.
    :param request_f: function that should be called to compose request, providing a RestClient instance
    :param github_user: GitHub user name (to try and obtain matching GitHub token if none is provided)
    :param token: GitHub token to use
    @return: tuple with return status and data
    """
    if github_user is None:
        github_user = build_option('github_user')

    if token is None:
        token = fetch_github_token(github_user)

    url = request_f(
        RestClient(GITHUB_API_URL, username=github_user, token=token))

    try:
        status, data = url.get(**kwargs)
    except socket.gaierror, err:
        _log.warning("Error occured while performing get request: %s" % err)
        status, data = 0, None
Esempio n. 41
0
    def __init__(self, mod_paths=None):
        """
        Create a ModulesTool object
        @param mod_paths: A list of paths where the modules can be located
        @type mod_paths: list
        """

        self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)
        self.mod_paths = None
        if mod_paths is not None:
            self.set_mod_paths(mod_paths)

        # DEPRECATED!
        self._modules = []

        # actual module command (i.e., not the 'module' wrapper function, but the binary)
        self.cmd = self.COMMAND
        if self.COMMAND_ENVIRONMENT is not None and self.COMMAND_ENVIRONMENT in os.environ:
            self.log.debug('Set command via environment variable %s' %
                           self.COMMAND_ENVIRONMENT)
            self.cmd = os.environ[self.COMMAND_ENVIRONMENT]

        if self.cmd is None:
            self.log.error('No command set.')
        else:
            self.log.debug('Using command %s' % self.cmd)

        # version of modules tool
        self.version = None

        # some initialisation/verification
        self.check_cmd_avail()
        self.check_module_path()
        self.check_module_function(
            allow_mismatch=build_option('allow_modules_tool_mismatch'))
        self.set_and_check_version()

        # this can/should be set to True during testing
        self.testing = False
Esempio n. 42
0
    def find_starccm_subdirs(self):
        """Determine subdirectory of install directory in which STAR-CCM+ was installed."""
        cwd = change_dir(self.installdir)
        cands = glob.glob(
            os.path.join(self.version + '*', 'STAR-CCM+%s*' % self.version))
        if len(cands) == 1:
            self.starccm_subdir = cands[0]
            self.log.info("Found STAR-CCM+ subdirectory: %s",
                          self.starccm_subdir)
            self.starview_subdir = os.path.join(
                os.path.dirname(self.starccm_subdir),
                'STAR-View+%s' % self.version)
        elif self.dry_run or build_option('module_only'):
            # Provide fake string values if executing a dry-run or module-only build
            self.starccm_subdir = ''
            self.starview_subdir = ''
        else:
            raise EasyBuildError(
                "Failed to determine the STAR-CCM+ subdirectory in %s: %s",
                self.installdir, cands)

        change_dir(cwd)
    def load_module(self, mod_name, recursive_unload=False, unload_modules=None):
        """
        Generate load statement for specified module.

        :param mod_name: name of module to generate load statement for
        :param recursive_unload: boolean indicating whether the 'load' statement should be reverted on unload
        :param unload_module: name(s) of module to unload first
        """
        body = []
        if unload_modules:
            body.extend([self.unload_module(m).strip() for m in unload_modules])
        body.append(self.LOAD_TEMPLATE)

        if build_option('recursive_mod_unload') or recursive_unload:
            # not wrapping the 'module load' with an is-loaded guard ensures recursive unloading;
            # when "module unload" is called on the module in which the dependency "module load" is present,
            # it will get translated to "module unload"
            load_statement = body + ['']
        else:
            load_statement = [self.conditional_statement("is-loaded %(mod_name)s", '\n'.join(body), negative=True)]

        return '\n'.join([''] + load_statement) % {'mod_name': mod_name}
Esempio n. 44
0
def run_hook(label,
             hooks,
             pre_step_hook=False,
             post_step_hook=False,
             args=None,
             msg=None):
    """
    Run hook with specified label and return result of calling the hook or None.

    :param label: name of hook
    :param hooks: dict of defined hooks
    :param pre_step_hook: indicates whether hook to run is a pre-step hook
    :param post_step_hook: indicates whether hook to run is a post-step hook
    :param args: arguments to pass to hook function
    :param msg: custom message that is printed when hook is called
    """
    hook = find_hook(label,
                     hooks,
                     pre_step_hook=pre_step_hook,
                     post_step_hook=post_step_hook)
    res = None
    if hook:
        if args is None:
            args = []

        if pre_step_hook:
            label = 'pre-' + label
        elif post_step_hook:
            label = 'post-' + label

        if msg is None:
            msg = "Running %s hook..." % label
        if build_option('debug'):
            print_msg(msg)

        _log.info("Running '%s' hook function (arguments: %s)...",
                  hook.__name__, args)
        res = hook(*args)
    return res
Esempio n. 45
0
    def test_use_rich_show_progress_bars(self):
        """Test use_rich and show_progress_bar functions."""

        # restore default configuration to show progress bars (disabled to avoid mangled test output)
        update_build_option('show_progress_bar', True)

        self.assertEqual(build_option('output_style'), 'auto')

        if HAVE_RICH:
            self.assertTrue(use_rich())
            self.assertTrue(show_progress_bars())

            update_build_option('output_style', 'rich')
            self.assertTrue(use_rich())
            self.assertTrue(show_progress_bars())
        else:
            self.assertFalse(use_rich())
            self.assertFalse(show_progress_bars())

        update_build_option('output_style', 'basic')
        self.assertFalse(use_rich())
        self.assertFalse(show_progress_bars())
    def dependencies(self):
        """
        Returns an array of parsed dependencies (after filtering, if requested)
        dependency = {'name': '', 'version': '', 'dummy': (False|True), 'versionsuffix': '', 'toolchain': ''}
        """
        deps = self['dependencies'] + self.builddependencies()

        # if filter-deps option is provided we "clean" the list of dependencies for
        # each processed easyconfig to remove the unwanted dependencies
        filter_deps = build_option('filter_deps')
        if filter_deps:
            self.log.debug("Dependencies BEFORE filtering: %s" % deps)
            filtered_deps = []
            for dep in deps:
                if dep['name'] not in filter_deps:
                    filtered_deps.append(dep)
                else:
                    self.log.info("filtered out dependency %s" % dep)
            self.log.debug("Dependencies AFTER filtering: %s" % filtered_deps)
            deps = filtered_deps

        return deps
Esempio n. 47
0
def check_singularity():
    """Check whether Singularity can be used (if it's needed)."""
    # if we're going to build a container image, we'll need a sufficiently recent version of Singularity available
    # (and otherwise we don't really care if Singularity is not available)

    if build_option('container_build_image'):
        path_to_singularity_cmd = which('singularity')
        if path_to_singularity_cmd:
            print_msg("Singularity tool found at %s" % path_to_singularity_cmd)
            out, ec = run_cmd("singularity --version", simple=False, trace=False, force_in_dry_run=True)
            if ec:
                raise EasyBuildError("Failed to determine Singularity version: %s" % out)
            else:
                # singularity version format for 2.3.1 and higher is x.y-dist
                singularity_version = out.strip().split('-')[0]

            if LooseVersion(singularity_version) < LooseVersion('2.4'):
                raise EasyBuildError("Please upgrade singularity instance to version 2.4 or higher")
            else:
                print_msg("Singularity version '%s' is 2.4 or higher ... OK" % singularity_version)
        else:
            raise EasyBuildError("Singularity not found in your system")
Esempio n. 48
0
    def __init__(self, *args, **kwargs):
        """Initialisation of custom class variables for ELPA."""
        super(EB_ELPA, self).__init__(*args, **kwargs)

        for flag in ELPA_CPU_FEATURE_FLAGS:
            # fail-safe: make sure we're not overwriting an existing attribute (could lead to weird bugs if we do)
            if hasattr(self, flag):
                raise EasyBuildError("EasyBlock attribute '%s' already exists")
            setattr(self, flag, self.cfg['use_%s' % flag])

        # auto-detect CPU features that can be used and are not enabled/disabled explicitly,
        # but only if --optarch=GENERIC is not being used
        if self.cfg['auto_detect_cpu_features']:

            # if --optarch=GENERIC is used, we will not use no CPU feature
            if build_option('optarch') == OPTARCH_GENERIC:
                cpu_features = []
            else:
                cpu_features = ELPA_CPU_FEATURE_FLAGS
            self.log.info("CPU features considered for auto-detection: %s",
                          cpu_features)

            # get list of available CPU features, so we can check which ones to retain
            avail_cpu_features = get_cpu_features()

            # on macOS, AVX is indicated with 'avx1.0' rather than 'avx'
            if 'avx1.0' in avail_cpu_features:
                avail_cpu_features.append('avx')

            self.log.info("List of available CPU features: %s",
                          avail_cpu_features)

            for flag in cpu_features:
                # only enable use of a particular CPU feature if it's still undecided (i.e. None)
                if getattr(self, flag) is None and flag in avail_cpu_features:
                    self.log.info(
                        "Enabling use of %s (should be supported based on CPU features)",
                        flag.upper())
                    setattr(self, flag, True)
Esempio n. 49
0
    def prepare_compiler_cache(self, cache_tool):
        """
        Prepare for using specified compiler caching tool (e.g., ccache, f90cache)

        :param cache_tool: name of compiler caching tool to prepare for
        """
        compilers = self.comp_cache_compilers(cache_tool)
        self.log.debug("Using compiler cache tool '%s' for compilers: %s", cache_tool, compilers)

        # set paths that should be used by compiler caching tool
        comp_cache_path = build_option('use_%s' % cache_tool)
        setvar('%s_DIR' % cache_tool.upper(), comp_cache_path)
        setvar('%s_TEMPDIR' % cache_tool.upper(), tempfile.mkdtemp())

        cache_path = which(cache_tool)
        if cache_path is None:
            raise EasyBuildError("%s binary not found in $PATH, required by --use-compiler-cache", cache)
        else:
            self.symlink_commands({cache_tool: (cache_path, compilers)})

        self.cached_compilers.update(compilers)
        self.log.debug("Cached compilers (after preparing for %s): %s", cache_tool, self.cached_compilers)
def check_python_version():
    """Check currently used Python version."""
    python_maj_ver = sys.version_info[0]
    python_min_ver = sys.version_info[1]
    python_ver = '%d.%d' % (python_maj_ver, python_min_ver)
    _log.info("Found Python version %s", python_ver)

    silence_deprecation_warnings = build_option(
        'silence_deprecation_warnings') or []

    if python_maj_ver == 2:
        if python_min_ver < 6:
            raise EasyBuildError(
                "Python 2.6 or higher is required when using Python 2, found Python %s",
                python_ver)
        elif python_min_ver == 6:
            depr_msg = "Running EasyBuild with Python 2.6 is deprecated"
            if 'Python26' in silence_deprecation_warnings:
                _log.warning(depr_msg)
            else:
                _log.deprecated(depr_msg, '5.0')
        else:
            _log.info("Running EasyBuild with Python 2 (version %s)",
                      python_ver)

    elif python_maj_ver == 3:
        if python_min_ver < 5:
            raise EasyBuildError(
                "Python 3.5 or higher is required when using Python 3, found Python %s",
                python_ver)
        else:
            _log.info("Running EasyBuild with Python 3 (version %s)",
                      python_ver)
    else:
        raise EasyBuildError(
            "EasyBuild is not compatible (yet) with Python %s", python_ver)

    return (python_maj_ver, python_min_ver)
Esempio n. 51
0
    def get_gromacs_arch(self):
        """Determine value of GMX_SIMD CMake flag based on optarch string.

        Refs:
        [0] http://manual.gromacs.org/documentation/2016.3/install-guide/index.html#typical-installation
        [1] http://manual.gromacs.org/documentation/2016.3/install-guide/index.html#simd-support
        [2] http://www.gromacs.org/Documentation/Acceleration_and_parallelization
        """
        # default: fall back on autodetection
        res = None

        optarch = build_option('optarch') or ''
        # take into account that optarch value is a dictionary if it is specified by compiler family
        if isinstance(optarch, dict):
            comp_fam = self.toolchain.comp_family()
            optarch = optarch.get(comp_fam, '')
        optarch = optarch.upper()

        if 'MIC-AVX512' in optarch and LooseVersion(self.version) >= LooseVersion('2016'):
            res = 'AVX_512_KNL'
        elif 'AVX512' in optarch and LooseVersion(self.version) >= LooseVersion('2016'):
            res = 'AVX_512'
        elif 'AVX2' in optarch and LooseVersion(self.version) >= LooseVersion('5.0'):
            res = 'AVX2_256'
        elif 'AVX' in optarch:
            res = 'AVX_256'
        elif 'SSE3' in optarch or 'SSE2' in optarch or 'MARCH=NOCONA' in optarch:
            # Gromacs doesn't have any GMX_SIMD=SSE3 but only SSE2 and SSE4.1 [1].
            # According to [2] the performance difference between SSE2 and SSE4.1 is minor on x86
            # and SSE4.1 is not supported by AMD Magny-Cours[1].
            res = 'SSE2'

        if res:
            self.log.info("Target architecture based on optarch configuration option ('%s'): %s", optarch, res)
        else:
            self.log.info("No target architecture specified based on optarch configuration option ('%s')", optarch)

        return res
Esempio n. 52
0
def det_full_module_name(ec, eb_ns=False):
    """
    Determine full module name following the currently active module naming scheme.

    First try to pass 'parsed' easyconfig as supplied,
    try and find a matching easyconfig file, parse it and supply it in case of a KeyError.
    """
    try:
        mod_name = _det_full_module_name(ec, eb_ns=eb_ns)

    except KeyError, err:
        _log.debug(
            "KeyError '%s' when determining module name for %s, trying fallback procedure..."
            % (err, ec))
        # for dependencies, only name/version/versionsuffix/toolchain easyconfig parameters are available;
        # when a key error occurs, try and find an easyconfig file to parse via the robot,
        # and retry with the parsed easyconfig file (which will contains a full set of keys)
        robot = build_option('robot_path')
        eb_file = robot_find_easyconfig(robot, ec['name'],
                                        det_full_ec_version(ec))
        if eb_file is None:
            _log.error(
                "Failed to find an easyconfig file when determining module name for: %s"
                % ec)
        else:
            parsed_ec = process_easyconfig(eb_file)
            if len(parsed_ec) > 1:
                _log.warning(
                    "More than one parsed easyconfig obtained from %s, only retaining first"
                    % eb_file)
            try:
                mod_name = _det_full_module_name(parsed_ec[0]['ec'],
                                                 eb_ns=eb_ns)
            except KeyError, err:
                _log.error(
                    "A KeyError '%s' occured when determining a module name for %s."
                    % parsed_ec['ec'])
Esempio n. 53
0
def match_minimum_tc_specs(source_tc_spec, target_tc_hierarchy):
    """
    Match a source toolchain spec to the minimal corresponding toolchain in a target hierarchy

    :param source_tc_spec: specs of source toolchain
    :param target_tc_hierarchy: hierarchy of specs for target toolchain
    """
    minimal_matching_toolchain = {}
    target_compiler_family = ''

    # break out once we've found the first match since the hierarchy is ordered low to high in terms of capabilities
    for target_tc_spec in target_tc_hierarchy:
        if check_capability_mapping(source_tc_spec, target_tc_spec):
            # GCCcore has compiler capabilities,
            # but should only be used in the target if the original toolchain was also GCCcore
            if target_tc_spec['name'] != GCCcore.NAME or source_tc_spec[
                    'name'] == GCCcore.NAME:
                minimal_matching_toolchain = {
                    'name': target_tc_spec['name'],
                    'version': target_tc_spec['version']
                }
                target_compiler_family = target_tc_spec['comp_family']
                break

    if not minimal_matching_toolchain:
        raise EasyBuildError(
            "No possible mapping from source toolchain spec %s to target toolchain hierarchy specs %s",
            source_tc_spec, target_tc_hierarchy)

    # Warn if we are changing compiler families, this is very likely to cause problems
    if target_compiler_family != source_tc_spec['comp_family']:
        print_warning(
            "Your request will result in a compiler family switch (%s to %s). Here be dragons!"
            % (source_tc_spec['comp_family'], target_compiler_family),
            silent=build_option('silent'))

    return minimal_matching_toolchain
Esempio n. 54
0
    def test_step(self):
        """Run NAMD test case."""
        if self.cfg['runtest']:

            if not build_option('mpi_tests'):
                self.log.info(
                    "Skipping testing of NAMD since MPI testing is disabled")
                return

            namdcmd = os.path.join(self.cfg['start_dir'], self.namd_arch,
                                   'namd%s' % self.version.split('.')[0])
            if self.cfg['charm_arch'].startswith('mpi'):
                namdcmd = self.toolchain.mpi_cmd_for(namdcmd, 2)
            ppn = ''
            if self.toolchain.options.get('openmp', False):
                ppn = '+ppn 2'
            cmd = "%(namd)s %(ppn)s %(testdir)s" % {
                'namd':
                namdcmd,
                'ppn':
                ppn,
                'testdir':
                os.path.join(self.cfg['start_dir'], self.namd_arch, 'src',
                             'alanin'),
            }
            out, ec = run_cmd(cmd, simple=False)
            if ec == 0:
                test_ok_regex = re.compile(
                    "(^Program finished.$|End of program\s*$)", re.M)
                if test_ok_regex.search(out):
                    self.log.debug("Test '%s' ran fine." % cmd)
                else:
                    raise EasyBuildError(
                        "Test '%s' failed ('%s' not found), output: %s", cmd,
                        test_ok_regex.pattern, out)
        else:
            self.log.debug("Skipping running NAMD test case after building")
Esempio n. 55
0
    def __init__(self, script, name, env_vars=None, hours=None, cores=None):
        """Create a new Job to be submitted to SLURM."""
        self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)

        self.jobid = None
        self.script = script
        self.name = name

        self.job_specs = {
            'job-name': self.name,
            # pattern for output file for submitted job;
            # SLURM replaces %x with job name, %j with job ID (see https://slurm.schedmd.com/sbatch.html#lbAF)
            'output': '%x-%j.out',
            'wrap': self.script,
        }

        if env_vars:
            self.job_specs['export'] = ','.join(sorted(env_vars.keys()))

        max_walltime = build_option('job_max_walltime')
        if hours is None:
            hours = max_walltime
        if hours > max_walltime:
            self.log.warn(
                "Specified %s hours, but this is impossible. (resetting to %s hours)"
                % (hours, max_walltime))
            hours = max_walltime
        self.job_specs['time'] = hours * 60

        if cores:
            self.job_specs['nodes'] = 1
            self.job_specs['ntasks'] = cores
            self.job_specs['ntasks-per-node'] = cores
        else:
            self.log.warn(
                "Number of cores to request not specified, falling back to whatever Slurm does by default"
            )
Esempio n. 56
0
    def run(self, *args, **kwargs):
        """Perform the actual Python package build/installation procedure"""

        if not self.src:
            raise EasyBuildError(
                "No source found for Python package %s, required for installation. (src: %s)",
                self.name, self.src)
        # we unpack unless explicitly told otherwise
        kwargs.setdefault('unpack_src', self._should_unpack_source())
        super(PythonPackage, self).run(*args, **kwargs)

        # configure, build, test, install
        # See EasyBlock.get_steps
        steps = [
            (CONFIGURE_STEP, 'configuring', [lambda x: x.configure_step],
             True),
            (BUILD_STEP, 'building', [lambda x: x.build_step], True),
            (TEST_STEP, 'testing', [lambda x: x._test_step], True),
            (INSTALL_STEP, "installing", [lambda x: x.install_step], True),
        ]
        self.skip = False  # --skip does not apply here
        self.silent = build_option('silent')
        # See EasyBlock.run_all_steps
        for (step_name, descr, step_methods, skippable) in steps:
            if self.skip_step(step_name, skippable):
                print_msg("\t%s [skipped]" % descr,
                          log=self.log,
                          silent=self.silent)
            else:
                if self.dry_run:
                    self.dry_run_msg("\t%s... [DRY RUN]\n", descr)
                else:
                    print_msg("\t%s..." % descr,
                              log=self.log,
                              silent=self.silent)
                    for step_method in step_methods:
                        step_method(self)()
Esempio n. 57
0
    def update(self):
        """Update after new modules were added."""
        if build_option('update_modules_tool_cache'):
            spider_cmd = os.path.join(os.path.dirname(self.cmd), 'spider')
            cmd = [spider_cmd, '-o', 'moduleT', os.environ['MODULEPATH']]
            self.log.debug("Running command '%s'..." % ' '.join(cmd))

            proc = subprocess.Popen(cmd,
                                    stdout=PIPE,
                                    stderr=PIPE,
                                    env=os.environ)
            (stdout, stderr) = proc.communicate()

            if stderr:
                raise EasyBuildError("An error occured when running '%s': %s",
                                     ' '.join(cmd), stderr)

            if self.testing:
                # don't actually update local cache when testing, just return the cache contents
                return stdout
            else:
                try:
                    cache_fp = os.path.join(self.USER_CACHE_DIR, 'moduleT.lua')
                    self.log.debug(
                        "Updating Lmod spider cache %s with output from '%s'" %
                        (cache_fp, ' '.join(cmd)))
                    cache_dir = os.path.dirname(cache_fp)
                    if not os.path.exists(cache_dir):
                        mkdir(cache_dir, parents=True)
                    cache_file = open(cache_fp, 'w')
                    cache_file.write(stdout)
                    cache_file.close()
                except (IOError, OSError), err:
                    raise EasyBuildError(
                        "Failed to update Lmod spider cache %s: %s", cache_fp,
                        err)
    def test_step(self):
        """Run tests using each of the backends."""

        if build_option('rpath'):
            # inject build location of libflexiblas.so.3 to $LD_LIBRARY_PATH
            # (which is used as a fallback to find libraries not found via the locations listed in the RPATH section),
            # to ensure that test binaries can find the FlexiBLAS library
            ld_library_path = ':'.join([
                os.path.join(self.obj_builddir, 'lib'),
                os.path.join(self.obj_builddir, 'lib64'),
                '$LD_LIBRARY_PATH'
            ])
            self.cfg['pretestopts'] = ('export LD_LIBRARY_PATH="%s" && ' % ld_library_path) + self.cfg['pretestopts']

        # Show verbose output if a test fails
        setvar('CTEST_OUTPUT_ON_FAILURE', 'True')

        # run tests with default backend (NETLIB)
        test_cmd = ' '.join([
            self.cfg['pretestopts'],
            "make test",
            self.cfg['testopts'],
        ])
        run_cmd(test_cmd)
    def __init__(self, *args, **kwargs):
        """Constructor."""

        # early check for required commands
        for cmd in ['sbatch', 'scontrol']:
            path = which(cmd)
            if path is None:
                raise EasyBuildError("Required command '%s' not found", cmd)

        super(Slurm, self).__init__(*args, **kwargs)

        job_deps_type = build_option('job_deps_type')
        if job_deps_type is None:
            job_deps_type = JOB_DEPS_TYPE_ABORT_ON_ERROR
            self.log.info("Using default job dependency type: %s", job_deps_type)
        else:
            self.log.info("Using specified job dependency type: %s", job_deps_type)

        if job_deps_type == JOB_DEPS_TYPE_ABORT_ON_ERROR:
            self.job_deps_type = 'afterok'
        elif job_deps_type == JOB_DEPS_TYPE_ALWAYS_RUN:
            self.job_deps_type = 'afterany'
        else:
            raise EasyBuildError("Unknown job dependency type specified: %s", job_deps_type)
def robot_find_easyconfig(name, version):
    """
    Find an easyconfig for module in path
    """
    key = (name, version)
    if key in _easyconfig_files_cache:
        _log.debug("Obtained easyconfig path from cache for %s: %s" % (key, _easyconfig_files_cache[key]))
        return _easyconfig_files_cache[key]
    paths = build_option('robot_path')
    if not isinstance(paths, (list, tuple)):
        if paths is None:
            _log.error("No robot path specified, which is required when looking for easyconfigs (use --robot)")
        paths = [paths]
    # candidate easyconfig paths
    for path in paths:
        easyconfigs_paths = create_paths(path, name, version)
        for easyconfig_path in easyconfigs_paths:
            _log.debug("Checking easyconfig path %s" % easyconfig_path)
            if os.path.isfile(easyconfig_path):
                _log.debug("Found easyconfig file for name %s, version %s at %s" % (name, version, easyconfig_path))
                _easyconfig_files_cache[key] = os.path.abspath(easyconfig_path)
                return _easyconfig_files_cache[key]

    return None