def search_easyconfigs(query, short=False):
    """Search for easyconfigs, if a query is provided."""
    robot_path = build_option("robot_path")
    if robot_path:
        search_path = robot_path
    else:
        search_path = [os.getcwd()]
    ignore_dirs = build_option("ignore_dirs")
    silent = build_option("silent")
    search_file(search_path, query, short=short, ignore_dirs=ignore_dirs, silent=silent)
def search_easyconfigs(query, short=False, filename_only=False, terse=False):
    """Search for easyconfigs, if a query is provided."""
    robot_path = build_option('robot_path')
    if robot_path:
        search_path = robot_path
    else:
        search_path = [os.getcwd()]
    ignore_dirs = build_option('ignore_dirs')
    silent = build_option('silent')
    search_file(search_path, query, short=short, ignore_dirs=ignore_dirs, silent=silent,
                filename_only=filename_only, terse=terse)
def search_easyconfigs(query, short=False, filename_only=False, terse=False):
    """Search for easyconfigs, if a query is provided."""
    search_path = build_option('robot_path')
    if not search_path:
        search_path = [os.getcwd()]
    extra_search_paths = build_option('search_paths')
    if extra_search_paths:
        search_path.extend(extra_search_paths)

    ignore_dirs = build_option('ignore_dirs')

    # note: don't pass down 'filename_only' here, we need the full path to filter out archived easyconfigs
    var_defs, _hits = search_file(search_path, query, short=short, ignore_dirs=ignore_dirs, terse=terse,
                                  silent=True, filename_only=False)

    # filter out archived easyconfigs, these are handled separately
    hits, archived_hits = [], []
    for hit in _hits:
        if EASYCONFIGS_ARCHIVE_DIR in hit.split(os.path.sep):
            archived_hits.append(hit)
        else:
            hits.append(hit)

    # check whether only filenames should be printed
    if filename_only:
        hits = [os.path.basename(hit) for hit in hits]
        archived_hits = [os.path.basename(hit) for hit in archived_hits]

    # prepare output format
    if terse:
        lines, tmpl = [], '%s'
    else:
        lines = ['%s=%s' % var_def for var_def in var_defs]
        tmpl = ' * %s'

    # non-archived hits are shown first
    lines.extend(tmpl % hit for hit in hits)

    # also take into account archived hits
    if archived_hits:
        if build_option('consider_archived_easyconfigs'):
            if not terse:
                lines.extend(['', "Matching archived easyconfigs:", ''])
            lines.extend(tmpl % hit for hit in archived_hits)
        elif not terse:
            cnt = len(archived_hits)
            lines.extend([
                '',
                "Note: %d matching archived easyconfig(s) found, use --consider-archived-easyconfigs to see them" % cnt,
            ])

    print '\n'.join(lines)
def search_easyconfigs(query, short=False, filename_only=False, terse=False):
    """Search for easyconfigs, if a query is provided."""
    search_path = build_option("robot_path")
    if not search_path:
        search_path = [os.getcwd()]
    ignore_dirs = build_option("ignore_dirs")

    # note: don't pass down 'filename_only' here, we need the full path to filter out archived easyconfigs
    var_defs, _hits = search_file(
        search_path, query, short=short, ignore_dirs=ignore_dirs, terse=terse, silent=True, filename_only=False
    )

    # filter out archived easyconfigs, these are handled separately
    hits, archived_hits = [], []
    for hit in _hits:
        if EASYCONFIGS_ARCHIVE_DIR in hit.split(os.path.sep):
            archived_hits.append(hit)
        else:
            hits.append(hit)

    # check whether only filenames should be printed
    if filename_only:
        hits = [os.path.basename(hit) for hit in hits]
        archived_hits = [os.path.basename(hit) for hit in archived_hits]

    # prepare output format
    if terse:
        lines, tmpl = [], "%s"
    else:
        lines = ["%s=%s" % var_def for var_def in var_defs]
        tmpl = " * %s"

    # non-archived hits are shown first
    lines.extend(tmpl % hit for hit in hits)

    # also take into account archived hits
    if archived_hits:
        if build_option("consider_archived_easyconfigs"):
            if not terse:
                lines.extend(["", "Matching archived easyconfigs:", ""])
            lines.extend(tmpl % hit for hit in archived_hits)
        elif not terse:
            cnt = len(archived_hits)
            lines.extend([
                "",
                "Note: %d matching archived easyconfig(s) found, use --consider-archived-easyconfigs to see them" % cnt,
            ])

    print "\n".join(lines)
def prepare_step(self, *args, **kwargs):
    """Determine MPI prefix path, MPI version and any required envvars."""

    # Keep track of original values of vars that are subject to change
    self.orig_installdir = self.installdir
    self.orig_version = self.cfg['version']

    # Use easyconfig name parameter to determine target MPI type
    self.mpi_name = self.cfg['name'].lower()

    # Ensure that MPI exists within the root path specified in mpi_install_path
    # and extract MPI-specific information
    if self.mpi_name == 'openmpi':
        # For HPC-X OpenMPI, ensure one and only one HPC-X init script exists and save its path
        hpcx_init_filename = 'hpcx-init.sh'
        _, hits = search_file([self.cfg['mpi_install_path']], hpcx_init_filename)
        if not hits:
            raise EasyBuildError("No %s script recursively found in %s",
                                 hpcx_init_filename, self.cfg['mpi_install_path'])
        if len(hits) > 1:
            raise EasyBuildError("Multiple %s scripts recursively found in %s",
                                 hpcx_init_filename, self.cfg['mpi_install_path'])
        else:
            self.hpcx_init = hits[0]
            self.log.info("Found HPC-X init script: %s" % self.hpcx_init)

        # Get the HPC-X prefix from init script absolute path
        self.hpcx_dir = os.path.dirname(self.hpcx_init)

        # Find OpenMPI version from ompi_info output
        ompi_info_out, ec = run_cmd('source %s && hpcx_load && ompi_info' % self.hpcx_init, simple=False)
        if ec:
            raise EasyBuildError("Failed to initialize HPC-X and run ompi_info: %s", ompi_info_out)
        else:
            self.mpi_version = self.extract_ompi_setting('Open MPI', ompi_info_out)
            self.log.info("Found OpenMPI version: %s", self.mpi_version)
    else:
        raise EasyBuildError("Unrecognized MPI type: %s", self.mpi_name)
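# Note: the helper self.extract_ompi_setting('Open MPI', ompi_info_out) is referenced above but not
# shown in this section. A minimal standalone sketch of how such a helper could work is given below,
# assuming the usual "        <key>: <value>" layout of `ompi_info` output; the function name and
# regex here are illustrative, not the easyblock's actual implementation.
import re

from easybuild.tools.build_log import EasyBuildError


def extract_ompi_setting_sketch(name, output):
    """Sketch: pull a single '<name>: <value>' setting out of captured ompi_info output."""
    # ompi_info prints settings as e.g. "                Open MPI: 4.1.5"
    regex = re.compile(r'^\s*%s:\s*(\S+)\s*$' % re.escape(name), re.M)
    match = regex.search(output)
    if not match:
        raise EasyBuildError("Failed to extract '%s' setting from ompi_info output", name)
    return match.group(1)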
def test_search_file(self):
    """Test search_file function."""
    test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')

    # check for default semantics, test case-insensitivity
    var_defs, hits = ft.search_file([test_ecs], 'HWLOC', silent=True)
    self.assertEqual(var_defs, [])
    self.assertEqual(len(hits), 2)
    self.assertTrue(all(os.path.exists(p) for p in hits))
    self.assertTrue(hits[0].endswith('/hwloc-1.6.2-GCC-4.6.4.eb'))
    self.assertTrue(hits[1].endswith('/hwloc-1.6.2-GCC-4.7.2.eb'))

    # check filename-only mode
    var_defs, hits = ft.search_file([test_ecs], 'HWLOC', silent=True, filename_only=True)
    self.assertEqual(var_defs, [])
    self.assertEqual(hits, ['hwloc-1.6.2-GCC-4.6.4.eb', 'hwloc-1.6.2-GCC-4.7.2.eb'])

    # check specifying of ignored dirs
    var_defs, hits = ft.search_file([test_ecs], 'HWLOC', silent=True, ignore_dirs=['hwloc'])
    self.assertEqual(var_defs + hits, [])

    # check short mode
    var_defs, hits = ft.search_file([test_ecs], 'HWLOC', silent=True, short=True)
    self.assertEqual(var_defs, [('CFGS1', os.path.join(test_ecs, 'h', 'hwloc'))])
    self.assertEqual(hits, ['$CFGS1/hwloc-1.6.2-GCC-4.6.4.eb', '$CFGS1/hwloc-1.6.2-GCC-4.7.2.eb'])

    # check terse mode (implies 'silent', overrides 'short')
    var_defs, hits = ft.search_file([test_ecs], 'HWLOC', terse=True, short=True)
    self.assertEqual(var_defs, [])
    expected = [
        os.path.join(test_ecs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.6.4.eb'),
        os.path.join(test_ecs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.7.2.eb'),
    ]
    self.assertEqual(hits, expected)

    # check combo of terse and filename-only
    var_defs, hits = ft.search_file([test_ecs], 'HWLOC', terse=True, filename_only=True)
    self.assertEqual(var_defs, [])
    self.assertEqual(hits, ['hwloc-1.6.2-GCC-4.6.4.eb', 'hwloc-1.6.2-GCC-4.7.2.eb'])
def main(testing_data=(None, None, None)):
    """
    Main function:
    @arg options: a tuple: (options, paths, logger, logfile, hn) as defined in parse_options
    This function will:
    - read easyconfig
    - build software
    """
    # disallow running EasyBuild as root
    if os.getuid() == 0:
        sys.stderr.write("ERROR: You seem to be running EasyBuild with root privileges.\n"
                         "That's not wise, so let's end this here.\n"
                         "Exiting.\n")
        sys.exit(1)

    # steer behavior when testing main
    testing = testing_data[0] is not None
    args, logfile, do_build = testing_data

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # set umask (as early as possible)
    if options.umask is not None:
        new_umask = int(options.umask, 8)
        old_umask = os.umask(new_umask)

    # set temporary directory to use
    eb_tmpdir = set_tmpdir(options.tmpdir)

    # initialise logging for main
    if options.logtostdout:
        fancylogger.logToScreen(enable=True, stdout=True)
    else:
        if logfile is None:
            # mkstemp returns (fd,filename), fd is from os.open, not regular open!
            fd, logfile = tempfile.mkstemp(suffix='.log', prefix='easybuild-')
            os.close(fd)
        fancylogger.logToFile(logfile)
        print_msg('temporary log file in case of crash %s' % (logfile), log=None, silent=testing)

    global _log
    _log = fancylogger.getLogger(fname=False)

    if options.umask is not None:
        _log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))

    # hello world!
    _log.info(this_is_easybuild())

    # how was EB called?
    eb_command_line = eb_go.generate_cmd_line() + eb_go.args
    _log.info("Command line: %s" % (" ".join(eb_command_line)))

    _log.info("Using %s as temporary directory" % eb_tmpdir)

    if not options.robot is None:
        if options.robot:
            _log.info("Using robot path(s): %s" % options.robot)
        else:
            _log.error("No robot paths specified, and unable to determine easybuild-easyconfigs install path.")

    # do not pass options.robot, it's not a list instance (and it shouldn't be modified)
    robot_path = None
    if options.robot:
        robot_path = list(options.robot)

    # determine easybuild-easyconfigs package install path
    easyconfigs_paths = get_paths_for("easyconfigs", robot_path=robot_path)
    # keep track of paths for installed easyconfigs, so we can find specified easyconfigs
    easyconfigs_pkg_full_paths = easyconfigs_paths[:]
    if not easyconfigs_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    # specified robot paths are preferred over installed easyconfig files
    if robot_path:
        robot_path.extend(easyconfigs_paths)
        easyconfigs_paths = robot_path[:]
        _log.info("Extended list of robot paths with paths for installed easyconfigs: %s" % robot_path)

    # initialise the easybuild configuration
    config.init(options, eb_go.get_options_by_section('config'))

    # building a dependency graph implies force, so that all dependencies are retained
    # and also skips validation of easyconfigs (e.g. checking os dependencies)
    retain_all_deps = False
    if options.dep_graph:
        _log.info("Enabling force to generate dependency graph.")
        options.force = True
        retain_all_deps = True

    config.init_build_options({
        'aggregate_regtest': options.aggregate_regtest,
        'allow_modules_tool_mismatch': options.allow_modules_tool_mismatch,
        'check_osdeps': not options.ignore_osdeps,
        'command_line': eb_command_line,
        'debug': options.debug,
        'dry_run': options.dry_run,
        'easyblock': options.easyblock,
        'experimental': options.experimental,
        'force': options.force,
        'group': options.group,
        'ignore_dirs': options.ignore_dirs,
        'modules_footer': options.modules_footer,
        'only_blocks': options.only_blocks,
        'recursive_mod_unload': options.recursive_module_unload,
        'regtest_online': options.regtest_online,
        'regtest_output_dir': options.regtest_output_dir,
        'retain_all_deps': retain_all_deps,
        'robot_path': robot_path,
        'sequential': options.sequential,
        'silent': testing,
        'set_gid_bit': options.set_gid_bit,
        'skip': options.skip,
        'skip_test_cases': options.skip_test_cases,
        'sticky_bit': options.sticky_bit,
        'stop': options.stop,
        'umask': options.umask,
        'valid_module_classes': module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        'validate': not options.force,
    })

    # search for easyconfigs
    if options.search or options.search_short:
        search_path = [os.getcwd()]
        if easyconfigs_paths:
            search_path = easyconfigs_paths
        query = options.search or options.search_short
        ignore_dirs = config.build_option('ignore_dirs')
        silent = config.build_option('silent')
        search_file(search_path, query, short=not options.search, ignore_dirs=ignore_dirs, silent=silent)

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    paths = []
    if len(orig_paths) == 0:
        if 'name' in build_specs:
            paths = [obtain_path(build_specs, easyconfigs_paths, try_to_generate=try_to_generate,
                                 exit_on_error=not testing)]
        elif not any([options.aggregate_regtest, options.search, options.search_short, options.regtest]):
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    else:
        # look for easyconfigs with relative paths in easybuild-easyconfigs package,
        # unless they were found at the given relative paths
        if easyconfigs_pkg_full_paths:
            # determine which easyconfigs files need to be found, if any
            ecs_to_find = []
            for idx, orig_path in enumerate(orig_paths):
                if orig_path == os.path.basename(orig_path) and not os.path.exists(orig_path):
                    ecs_to_find.append((idx, orig_path))
            _log.debug("List of easyconfig files to find: %s" % ecs_to_find)

            # find missing easyconfigs by walking paths with installed easyconfig files
            for path in easyconfigs_pkg_full_paths:
                _log.debug("Looking for missing easyconfig files (%d left) in %s..." % (len(ecs_to_find), path))
                for (subpath, dirnames, filenames) in os.walk(path, topdown=True):
                    for idx, orig_path in ecs_to_find[:]:
                        if orig_path in filenames:
                            full_path = os.path.join(subpath, orig_path)
                            _log.info("Found %s in %s: %s" % (orig_path, path, full_path))
                            orig_paths[idx] = full_path
                            # if file was found, stop looking for it (first hit wins)
                            ecs_to_find.remove((idx, orig_path))

                    # stop os.walk insanity as soon as we have all we need (os.walk loop)
                    if len(ecs_to_find) == 0:
                        break

                    # ignore subdirs specified to be ignored by replacing items in dirnames list used by os.walk
                    dirnames[:] = [d for d in dirnames if not d in options.ignore_dirs]

                # stop os.walk insanity as soon as we have all we need (paths loop)
                if len(ecs_to_find) == 0:
                    break

        # indicate that specified paths do not contain generated easyconfig files
        paths = [(path, False) for path in orig_paths]

    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        if paths:
            ec_paths = [path[0] for path in paths]
        else:  # fallback: easybuild-easyconfigs install path
            ec_paths = easyconfigs_pkg_full_paths
        regtest_ok = regtest(ec_paths)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs = []
    for (path, generated) in paths:
        path = os.path.abspath(path)
        if not os.path.exists(path):
            print_error("Can't find path %s" % path)

        try:
            files = find_easyconfigs(path, ignore_dirs=options.ignore_dirs)
            for f in files:
                if not generated and try_to_generate and build_specs:
                    ec_file = tweak(f, None, build_specs)
                else:
                    ec_file = f
                ecs = process_easyconfig(ec_file, build_specs=build_specs)
                easyconfigs.extend(ecs)
        except IOError, err:
            _log.error("Processing easyconfigs in path %s failed: %s" % (path, err))
def search_easyconfigs(query, short=False, filename_only=False, terse=False, consider_extra_paths=True,
                       print_result=True, case_sensitive=False):
    """
    Search for easyconfigs, if a query is provided.

    :param query: regex query string
    :param short: figure out common prefix of hits, use variable to factor it out
    :param filename_only: only print filenames, not paths
    :param terse: stick to terse (machine-readable) output, as opposed to pretty-printing
    :param consider_extra_paths: consider all paths when searching
    :param print_result: print the list of easyconfigs
    :param case_sensitive: boolean to decide whether search is case sensitive

    :return: return a list of paths for the query
    """
    search_path = build_option('robot_path')
    if not search_path:
        search_path = [os.getcwd()]
    extra_search_paths = build_option('search_paths')
    # If we're returning a list of possible resolutions by the robot, don't include the extra_search_paths
    if extra_search_paths and consider_extra_paths:
        # we shouldn't use += or .extend here but compose a new list,
        # to avoid adding a path to the list returned by build_option('robot_path') !
        search_path = search_path + extra_search_paths

    ignore_dirs = build_option('ignore_dirs')

    # note: don't pass down 'filename_only' here, we need the full path to filter out archived easyconfigs
    var_defs, _hits = search_file(search_path, query, short=short, ignore_dirs=ignore_dirs, terse=terse,
                                  silent=True, filename_only=False, case_sensitive=case_sensitive)

    # filter out archived easyconfigs, these are handled separately
    hits, archived_hits = [], []
    for hit in _hits:
        if EASYCONFIGS_ARCHIVE_DIR in hit.split(os.path.sep):
            archived_hits.append(hit)
        else:
            hits.append(hit)

    # check whether only filenames should be used
    if filename_only:
        hits = [os.path.basename(hit) for hit in hits]
        archived_hits = [os.path.basename(hit) for hit in archived_hits]

    if print_result:
        # prepare output format
        if terse:
            lines, tmpl = [], '%s'
        else:
            lines = ['%s=%s' % var_def for var_def in var_defs]
            tmpl = ' * %s'

        # non-archived hits are shown first
        lines.extend(tmpl % hit for hit in hits)

        # also take into account archived hits
        if archived_hits:
            if build_option('consider_archived_easyconfigs'):
                if not terse:
                    lines.extend(['', "Matching archived easyconfigs:", ''])
                lines.extend(tmpl % hit for hit in archived_hits)
            elif not terse:
                cnt = len(archived_hits)
                lines.extend([
                    '',
                    "Note: %d matching archived easyconfig(s) found, use --consider-archived-easyconfigs to see them" % cnt,
                ])

        print('\n'.join(lines))

    # if requested return the matches as a list
    if build_option('consider_archived_easyconfigs'):
        final_hits = hits + archived_hits
    else:
        final_hits = hits

    return final_hits
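# Illustrative usage sketch (not part of the module above). It assumes EasyBuild's configuration has
# already been initialised (e.g. via easybuild.tools.options.set_up_configuration()), since
# search_easyconfigs() relies on build_option() internally; the function name and the query regex
# are placeholders.
def demo_search_easyconfigs():
    """Print matching easyconfig filenames for an example regex query."""
    # 'filename_only' drops the directory part, 'print_result=False' suppresses the pretty-printed output
    hits = search_easyconfigs('^OpenMPI-.*', filename_only=True, print_result=False)
    for filename in hits:
        print(filename)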
def main():
    """Main function."""
    if len(sys.argv) == 3:
        ec = sys.argv[1]
        tc = sys.argv[2]
    else:
        error("Usage %s <easyconfig> <toolchain> [<name=version>]" % sys.argv[0])

    tc_name, tc_ver = tc.split('/')

    print("Updating %s for %s toolchain version %s..." % (ec, tc_name, tc_ver))

    set_up_configuration(silent=True)
    modtool = modules_tool()
    robot_path = build_option('robot_path')

    ec_path = det_easyconfig_paths([ec])[0]
    print("Found %s easyconfig file at %s" % (ec, ec_path))

    parsed_ecs, _ = parse_easyconfigs([(ec_path, False)], validate=False)

    print("Resolving dependencies... ", end='')
    ecs = resolve_dependencies(parsed_ecs, modtool, retain_all_deps=True)
    print("found stack of %d easyconfigs" % len(ecs))

    print("Filtering toolchain and its dependencies...")
    ec_tc = parsed_ecs[0]['ec']['toolchain']
    ecs_to_remove = [{
        'name': ec_tc['name'],
        'version': ec_tc['version'],
        'toolchain': {'name': SYSTEM_TOOLCHAIN_NAME},
    }]

    updated_ecs = {}

    # if GCCcore is used as toolchain, determine binutils version to use
    if tc_name == 'GCCcore':
        binutils_pattern = '^binutils.*-%s-%s.*.eb$' % (tc_name, tc_ver)
        _, res = search_file(robot_path, binutils_pattern)
        if res:
            if len(res) == 1:
                parsed_ecs, _ = parse_easyconfigs([(res[0], False)])
                binutils_ec = parsed_ecs[0]
                tc = copy.copy(binutils_ec['ec']['toolchain'])
                ecs_to_remove.append({
                    'name': 'binutils',
                    'version': binutils_ec['ec'].version,
                    'toolchain': tc,
                })
            else:
                error("Found more than one easyconfig matching '%s': %s" % (binutils_pattern, res))
        else:
            error("No easyconfig file found for binutils using pattern '%s'" % binutils_pattern)

    while ecs_to_remove:
        to_remove = ecs_to_remove.pop(0)
        print("Removing %(name)s/%(version)s (toolchain: %(toolchain)s)" % to_remove)
        for ec in ecs:
            if ec['ec'].name == to_remove['name'] and ec['ec'].version == to_remove['version'] and \
                    ec['ec']['toolchain']['name'] == to_remove['toolchain']['name']:
                ecs.remove(ec)
                ecs_to_remove.extend(dep for dep in ec['ec']['dependencies'] + ec['ec']['builddependencies'])
                updated_ecs[ec['full_mod_name']] = {
                    'builddependencies': [],
                    'dependencies': [],
                    'toolchain': copy.copy(ec['ec']['toolchain']),
                    'version': ec['ec'].version,
                }
                break

    ecs_to_write = []
    for ec in ecs:
        ec_fn = os.path.basename(ec['spec'])
        print(term.bold("Determining version for %s..." % ec_fn))
        full_mod_name = ec['full_mod_name']

        ec_tc = copy.copy(ec['ec']['toolchain'])
        # update toolchain (unless it's SYSTEM)
        if ec_tc['name'] != SYSTEM_TOOLCHAIN_NAME:
            if ec_tc['name'] == tc_name:
                ec_tc['version'] = tc_ver
            else:
                error("Don't know how to update toolchain %s" % ec_tc['name'])

        # update (build) dependencies
        build_deps = []
        for dep in ec['ec']['builddependencies']:
            new_dep_ver = updated_ecs[dep['full_mod_name']]['version']
            build_deps.append((dep['name'], new_dep_ver))
        deps = []
        for dep in ec['ec']['dependencies']:
            new_dep_ver = updated_ecs[dep['full_mod_name']]['version']
            deps.append((dep['name'], new_dep_ver))

        # determine software version to use;
        # first, try searching for an existing easyconfig with specified toolchain;
        # if that fails, try to determine latest upstream version
        ec_pattern = '^%s.*-%s-%s.*.eb$' % (ec['ec'].name, tc_name, tc_ver)
        _, res = search_file(robot_path, ec_pattern)
        if res:
            if len(res) == 1:
                parsed_ecs, _ = parse_easyconfigs([(res[0], False)])
                ec = parsed_ecs[0]
                new_version = ec['ec'].version
                print(term.green("Found existing easyconfig, sticking to version %s" % new_version))
            else:
                error("Multiple hits found using '%s': %s" % (ec_pattern, res))
        else:
            new_version = update_version(ec['ec'])
            ecs_to_write.append(ec)
            if new_version is None:
                print(term.yellow("No new version found for %s, using existing version" % full_mod_name))
                new_version = ec['ec'].version

        updated_ecs[full_mod_name] = {
            'builddependencies': build_deps,
            'dependencies': deps,
            'toolchain': ec_tc,
            'version': new_version,
        }

    for ec in ecs_to_write:
        full_mod_name = ec['full_mod_name']
        pprint.pprint(full_mod_name)
        ec = ec['ec']
        ectxt = ec.rawtxt

        key_pattern = r'^%s\s*=.*'
        list_key_pattern = r'^%s\s*=\s*\[([^\]]|\n)*\s*\]'

        new_version = updated_ecs[full_mod_name]['version']
        if ec.version != new_version:
            regex = re.compile(key_pattern % 'version', re.M)
            ectxt = regex.sub("version = '%s'" % new_version, ectxt)

            # if version got updated, also wipe the checksums
            regex = re.compile(list_key_pattern % 'checksums', re.M)
            ectxt = regex.sub("checksums = []", ectxt)

        # toolchain
        tc_str = "toolchain = {'name': '%(name)s', 'version': '%(version)s'}" % updated_ecs[full_mod_name]['toolchain']
        regex = re.compile(key_pattern % 'toolchain', re.M)
        ectxt = regex.sub(tc_str, ectxt)

        # dependencies
        for key in ('builddependencies', 'dependencies'):
            deps_str = '%s = [\n' % key
            for dep in updated_ecs[full_mod_name][key]:
                deps_str += ' ' + str(dep) + ',\n'
            deps_str += ']'
            regex = re.compile(list_key_pattern % key, re.M)
            ectxt = regex.sub(deps_str, ectxt)

        specs = {
            'name': ec.name,
            'toolchain': updated_ecs[full_mod_name]['toolchain'],
            'version': new_version,
            'versionsuffix': ec['versionsuffix'],
        }
        ec_fn = '%s-%s.eb' % (ec.name, det_full_ec_version(specs))
        write_file(ec_fn, ectxt)
        print(term.green("%s written" % ec_fn))
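# Example invocation sketch, based on the usage string in main() above; the script name and the
# easyconfig/toolchain arguments are placeholders, not taken from the original:
#
#     python update_easyconfig_toolchain.py HDF5-1.12.1-gompi-2021b.eb GCCcore/12.2.0
#
# Note that the usage message mentions an optional <name=version> argument, but the argument handling
# above only accepts exactly two positional arguments (len(sys.argv) == 3).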
def main(testing_data=(None, None, None)):
    """
    Main function:
    @arg options: a tuple: (options, paths, logger, logfile, hn) as defined in parse_options
    This function will:
    - read easyconfig
    - build software
    """
    # purposely capture session state very early, to avoid modules loaded by EasyBuild meddling in
    init_session_state = session_state()

    # disallow running EasyBuild as root
    if os.getuid() == 0:
        sys.stderr.write("ERROR: You seem to be running EasyBuild with root privileges.\n"
                         "That's not wise, so let's end this here.\n"
                         "Exiting.\n")
        sys.exit(1)

    # steer behavior when testing main
    testing = testing_data[0] is not None
    args, logfile, do_build = testing_data

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    eb_config = eb_go.generate_cmd_line(add_default=True)
    init_session_state.update({'easybuild_configuration': eb_config})

    # set umask (as early as possible)
    if options.umask is not None:
        new_umask = int(options.umask, 8)
        old_umask = os.umask(new_umask)

    # set temporary directory to use
    eb_tmpdir = set_tmpdir(options.tmpdir)

    # initialise logging for main
    if options.logtostdout:
        fancylogger.logToScreen(enable=True, stdout=True)
    else:
        if logfile is None:
            # mkstemp returns (fd,filename), fd is from os.open, not regular open!
            fd, logfile = tempfile.mkstemp(suffix='.log', prefix='easybuild-')
            os.close(fd)
        fancylogger.logToFile(logfile)
        print_msg('temporary log file in case of crash %s' % (logfile), log=None, silent=testing)

    global _log
    _log = fancylogger.getLogger(fname=False)

    if options.umask is not None:
        _log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))

    # hello world!
    _log.info(this_is_easybuild())

    # how was EB called?
    eb_command_line = eb_go.generate_cmd_line() + eb_go.args
    _log.info("Command line: %s" % (" ".join(eb_command_line)))

    _log.info("Using %s as temporary directory" % eb_tmpdir)

    if not options.robot is None:
        if options.robot:
            _log.info("Using robot path(s): %s" % options.robot)
        else:
            _log.error("No robot paths specified, and unable to determine easybuild-easyconfigs install path.")

    # do not pass options.robot, it's not a list instance (and it shouldn't be modified)
    robot_path = None
    if options.robot:
        robot_path = list(options.robot)

    # determine easybuild-easyconfigs package install path
    easyconfigs_paths = get_paths_for("easyconfigs", robot_path=robot_path)
    # keep track of paths for installed easyconfigs, so we can find specified easyconfigs
    easyconfigs_pkg_full_paths = easyconfigs_paths[:]
    if not easyconfigs_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    # specified robot paths are preferred over installed easyconfig files
    # --try-X and --dep-graph both require --robot, so enable it with path of installed easyconfigs
    if robot_path or try_to_generate or options.dep_graph:
        if robot_path is None:
            robot_path = []
        robot_path.extend(easyconfigs_paths)
        easyconfigs_paths = robot_path[:]
        _log.info("Extended list of robot paths with paths for installed easyconfigs: %s" % robot_path)

    # initialise the easybuild configuration
    config.init(options, eb_go.get_options_by_section('config'))

    # building a dependency graph implies force, so that all dependencies are retained
    # and also skips validation of easyconfigs (e.g. checking os dependencies)
    retain_all_deps = False
    if options.dep_graph:
        _log.info("Enabling force to generate dependency graph.")
        options.force = True
        retain_all_deps = True

    config.init_build_options({
        'aggregate_regtest': options.aggregate_regtest,
        'allow_modules_tool_mismatch': options.allow_modules_tool_mismatch,
        'check_osdeps': not options.ignore_osdeps,
        'cleanup_builddir': options.cleanup_builddir,
        'command_line': eb_command_line,
        'debug': options.debug,
        'dry_run': options.dry_run,
        'easyblock': options.easyblock,
        'experimental': options.experimental,
        'force': options.force,
        'github_user': options.github_user,
        'group': options.group,
        'ignore_dirs': options.ignore_dirs,
        'modules_footer': options.modules_footer,
        'only_blocks': options.only_blocks,
        'recursive_mod_unload': options.recursive_module_unload,
        'regtest_output_dir': options.regtest_output_dir,
        'retain_all_deps': retain_all_deps,
        'robot_path': robot_path,
        'sequential': options.sequential,
        'silent': testing,
        'set_gid_bit': options.set_gid_bit,
        'skip': options.skip,
        'skip_test_cases': options.skip_test_cases,
        'sticky_bit': options.sticky_bit,
        'stop': options.stop,
        'umask': options.umask,
        'valid_module_classes': module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        'validate': not options.force,
    })

    # obtain list of loaded modules, build options must be initialized first
    modlist = session_module_list()
    init_session_state.update({'module_list': modlist})
    _log.debug("Initial session state: %s" % init_session_state)

    # search for easyconfigs
    if options.search or options.search_short:
        search_path = [os.getcwd()]
        if easyconfigs_paths:
            search_path = easyconfigs_paths
        query = options.search or options.search_short
        ignore_dirs = config.build_option('ignore_dirs')
        silent = config.build_option('silent')
        search_file(search_path, query, short=not options.search, ignore_dirs=ignore_dirs, silent=silent)

    paths = []
    if len(orig_paths) == 0:
        if options.from_pr:
            pr_path = os.path.join(eb_tmpdir, "files_pr%s" % options.from_pr)
            pr_files = fetch_easyconfigs_from_pr(options.from_pr, path=pr_path, github_user=options.github_user)
            paths = [(path, False) for path in pr_files if path.endswith('.eb')]
        elif 'name' in build_specs:
            paths = [obtain_path(build_specs, easyconfigs_paths, try_to_generate=try_to_generate,
                                 exit_on_error=not testing)]
        elif not any([options.aggregate_regtest, options.search, options.search_short, options.regtest]):
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    else:
        # look for easyconfigs with relative paths in easybuild-easyconfigs package,
        # unless they were found at the given relative paths
        if easyconfigs_pkg_full_paths:
            # determine which easyconfigs files need to be found, if any
            ecs_to_find = []
            for idx, orig_path in enumerate(orig_paths):
                if orig_path == os.path.basename(orig_path) and not os.path.exists(orig_path):
                    ecs_to_find.append((idx, orig_path))
            _log.debug("List of easyconfig files to find: %s" % ecs_to_find)

            # find missing easyconfigs by walking paths with installed easyconfig files
            for path in easyconfigs_pkg_full_paths:
                _log.debug("Looking for missing easyconfig files (%d left) in %s..." % (len(ecs_to_find), path))
                for (subpath, dirnames, filenames) in os.walk(path, topdown=True):
                    for idx, orig_path in ecs_to_find[:]:
                        if orig_path in filenames:
                            full_path = os.path.join(subpath, orig_path)
                            _log.info("Found %s in %s: %s" % (orig_path, path, full_path))
                            orig_paths[idx] = full_path
                            # if file was found, stop looking for it (first hit wins)
                            ecs_to_find.remove((idx, orig_path))

                    # stop os.walk insanity as soon as we have all we need (os.walk loop)
                    if len(ecs_to_find) == 0:
                        break

                    # ignore subdirs specified to be ignored by replacing items in dirnames list used by os.walk
                    dirnames[:] = [d for d in dirnames if not d in options.ignore_dirs]

                # stop os.walk insanity as soon as we have all we need (paths loop)
                if len(ecs_to_find) == 0:
                    break

        # indicate that specified paths do not contain generated easyconfig files
        paths = [(path, False) for path in orig_paths]

    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        if paths:
            ec_paths = [path[0] for path in paths]
        else:  # fallback: easybuild-easyconfigs install path
            ec_paths = easyconfigs_pkg_full_paths
        regtest_ok = regtest(ec_paths)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs = []
    for (path, generated) in paths:
        path = os.path.abspath(path)
        if not os.path.exists(path):
            print_error("Can't find path %s" % path)

        try:
            ec_files = find_easyconfigs(path, ignore_dirs=options.ignore_dirs)
            for ec_file in ec_files:
                # only pass build specs when not generating easyconfig files
                if try_to_generate:
                    ecs = process_easyconfig(ec_file)
                else:
                    ecs = process_easyconfig(ec_file, build_specs=build_specs)
                easyconfigs.extend(ecs)
        except IOError, err:
            _log.error("Processing easyconfigs in path %s failed: %s" % (path, err))