def test_changed_files_pull_request(self):
    """Specific checks only done for the (easyconfig) files that were changed in a pull request."""
    # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
    if re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST', '(none)')):

        # target branch should be anything other than 'master';
        # usually is 'develop', but could also be a release branch like '3.7.x'
        travis_branch = os.environ.get('TRAVIS_BRANCH', None)
        if travis_branch and travis_branch != 'master':

            if not self.parsed_easyconfigs:
                self.process_all_easyconfigs()

            # relocate to top-level directory of repository to run 'git diff' command
            top_dir = os.path.dirname(os.path.dirname(get_paths_for('easyconfigs')[0]))
            cwd = change_dir(top_dir)

            # get list of changed easyconfigs
            cmd = "git diff --name-only --diff-filter=AM %s...HEAD" % travis_branch
            # exit code of 'git diff' is not used; bind it to '_' so it doesn't shadow
            # the 'ec' loop variable used below
            out, _ = run_cmd(cmd, simple=False)
            changed_ecs_filenames = [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]
            print("\nList of changed easyconfig files in this PR: %s" % '\n'.join(changed_ecs_filenames))

            change_dir(cwd)

            # grab parsed easyconfigs for changed easyconfig files
            changed_ecs = []
            for ec_fn in changed_ecs_filenames:
                match = None
                for ec in self.parsed_easyconfigs:
                    if os.path.basename(ec['spec']) == ec_fn:
                        match = ec['ec']
                        break

                if match:
                    changed_ecs.append(match)
                else:
                    # if no easyconfig is found, it's possible some archived easyconfigs were touched in the PR...
                    # so as a last resort, try to find the easyconfig file in __archive__
                    easyconfigs_path = get_paths_for("easyconfigs")[0]
                    specs = glob.glob('%s/__archive__/*/*/%s' % (easyconfigs_path, ec_fn))
                    if len(specs) == 1:
                        ec = process_easyconfig(specs[0])[0]
                        changed_ecs.append(ec['ec'])
                    else:
                        error_msg = "Failed to find parsed easyconfig for %s" % ec_fn
                        error_msg += " (and could not isolate it in easyconfigs archive either)"
                        # fail() is the idiomatic way to report a hard failure,
                        # rather than asserting a constant False
                        self.fail(error_msg)

            # run checks on changed easyconfigs
            self.check_sha256_checksums(changed_ecs)
            self.check_python_packages(changed_ecs)
def test_changed_files_pull_request(self):
    """Specific checks only done for the (easyconfig) files that were changed in a pull request."""
    # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
    if re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST', '(none)')):

        # target branch should be anything other than 'master';
        # usually is 'develop', but could also be a release branch like '3.7.x'
        travis_branch = os.environ.get('TRAVIS_BRANCH', None)
        if travis_branch and travis_branch != 'master':

            if not self.parsed_easyconfigs:
                self.process_all_easyconfigs()

            # relocate to top-level directory of repository to run 'git diff' command
            top_dir = os.path.dirname(os.path.dirname(get_paths_for('easyconfigs')[0]))
            cwd = change_dir(top_dir)

            # get list of changed easyconfigs
            cmd = "git diff --name-only --diff-filter=AM %s...HEAD" % travis_branch
            # discard the exit code: binding it to 'ec' would be confusing, since 'ec'
            # is reused below as the easyconfig loop variable
            out, _ = run_cmd(cmd, simple=False)
            changed_ecs_filenames = [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]
            print("\nList of changed easyconfig files in this PR: %s" % '\n'.join(changed_ecs_filenames))

            change_dir(cwd)

            # grab parsed easyconfigs for changed easyconfig files
            changed_ecs = []
            for ec_fn in changed_ecs_filenames:
                match = None
                for ec in self.parsed_easyconfigs:
                    if os.path.basename(ec['spec']) == ec_fn:
                        match = ec['ec']
                        break

                if match:
                    changed_ecs.append(match)
                else:
                    # if no easyconfig is found, it's possible some archived easyconfigs were touched in the PR...
                    # so as a last resort, try to find the easyconfig file in __archive__
                    easyconfigs_path = get_paths_for("easyconfigs")[0]
                    specs = glob.glob('%s/__archive__/*/*/%s' % (easyconfigs_path, ec_fn))
                    if len(specs) == 1:
                        ec = process_easyconfig(specs[0])[0]
                        changed_ecs.append(ec['ec'])
                    else:
                        error_msg = "Failed to find parsed easyconfig for %s" % ec_fn
                        error_msg += " (and could not isolate it in easyconfigs archive either)"
                        # use fail() to report the failure directly (assertTrue(False, ...) is an anti-pattern)
                        self.fail(error_msg)

            # run checks on changed easyconfigs
            self.check_sha256_checksums(changed_ecs)
            self.check_python_packages(changed_ecs)
def __init__(self, *args, **kwargs):
    """Constructor."""
    self.default_repositorypath = [mk_full_default_path('repositorypath')]
    self.default_robot_paths = get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR, robot_path=None) or []

    # constants to seed into the config files parser, grouped per section
    self.go_cfg_constants = {
        self.DEFAULTSECT: {
            'DEFAULT_REPOSITORYPATH': (self.default_repositorypath[0],
                                       "Default easyconfigs repository path"),
            'DEFAULT_ROBOT_PATHS': (os.pathsep.join(self.default_robot_paths),
                                    "List of default robot paths ('%s'-separated)" % os.pathsep),
        }
    }

    # update or define go_configfiles_initenv in named arguments to pass to parent constructor
    go_cfg_initenv = kwargs.setdefault('go_configfiles_initenv', {})
    for section_name, section_constants in self.go_cfg_constants.items():
        # strip off the help text, keeping only name -> value mappings
        name_to_value = dict((name, value) for (name, (value, _)) in section_constants.items())
        go_cfg_initenv.setdefault(section_name, {}).update(name_to_value)

    super(EasyBuildOptions, self).__init__(*args, **kwargs)
def basic_options(self):
    """basic runtime options"""
    all_stops = [x[0] for x in EasyBlock.get_steps()]
    strictness_options = [filetools.IGNORE, filetools.WARN, filetools.ERROR]
    try:
        default_robot_path = get_paths_for("easyconfigs", robot_path=None)[0]
    except Exception:
        # catch Exception rather than using a bare 'except', which would also
        # swallow SystemExit/KeyboardInterrupt
        self.log.warning("basic_options: unable to determine default easyconfig path")
        # False as opposed to None, since None is used for indicating that --robot was not used
        default_robot_path = False

    descr = ("Basic options", "Basic runtime options for EasyBuild.")

    opts = OrderedDict({
        "only-blocks": ("Only build listed blocks", None, "extend", None, "b", {'metavar': "BLOCKS"}),
        "force": (("Force to rebuild software even if it's already installed "
                   "(i.e. if it can be found as module)"),
                  None, "store_true", False, "f"),
        "job": ("Submit the build as a job", None, "store_true", False),
        "skip": ("Skip existing software (useful for installing additional packages)",
                 None, "store_true", False, "k"),
        "robot": ("Path to search for easyconfigs for missing dependencies.",
                  None, "store_or_None", default_robot_path, "r", {'metavar': "PATH"}),
        "stop": ("Stop the installation after certain step",
                 "choice", "store_or_None", "source", "s", all_stops),
        "strict": ("Set strictness level", "choice", "store", filetools.WARN, strictness_options),
        "logtostdout": ("Redirect main log to stdout", None, "store_true", False, "l"),
    })

    self.log.debug("basic_options: descr %s opts %s" % (descr, opts))
    self.add_group_parser(opts, descr)
def basic_options(self):
    """basic runtime options"""
    all_stops = [x[0] for x in EasyBlock.get_steps()]
    strictness_options = [run.IGNORE, run.WARN, run.ERROR]
    try:
        default_robot_path = get_paths_for("easyconfigs", robot_path=None)[0]
    except Exception:
        # catch Exception rather than using a bare 'except', which would also
        # swallow SystemExit/KeyboardInterrupt
        self.log.warning("basic_options: unable to determine default easyconfig path")
        # False as opposed to None, since None is used for indicating that --robot was not used
        default_robot_path = False

    descr = ("Basic options", "Basic runtime options for EasyBuild.")

    opts = OrderedDict({
        'dry-run': ("Print build overview incl. dependencies (full paths)", None, 'store_true', False),
        'dry-run-short': ("Print build overview incl. dependencies (short paths)", None, 'store_true', False, 'D'),
        'force': ("Force to rebuild software even if it's already installed (i.e. if it can be found as module)",
                  None, 'store_true', False, 'f'),
        'job': ("Submit the build as a job", None, 'store_true', False),
        'logtostdout': ("Redirect main log to stdout", None, 'store_true', False, 'l'),
        'only-blocks': ("Only build listed blocks", None, 'extend', None, 'b', {'metavar': 'BLOCKS'}),
        'robot': ("Path(s) to search for easyconfigs for missing dependencies (colon-separated)",
                  None, 'store_or_None', default_robot_path, 'r', {'metavar': 'PATH'}),
        'skip': ("Skip existing software (useful for installing additional packages)",
                 None, 'store_true', False, 'k'),
        'stop': ("Stop the installation after certain step",
                 'choice', 'store_or_None', 'source', 's', all_stops),
        'strict': ("Set strictness level", 'choice', 'store', run.WARN, strictness_options),
    })

    self.log.debug("basic_options: descr %s opts %s" % (descr, opts))
    self.add_group_parser(opts, descr)
def suite():
    """Return all easyblock --module-only tests."""
    def make_inner_test(easyblock):
        """Create test method that runs the --module-only test for the given easyblock."""
        def innertest(self):
            template_module_only_test(self, easyblock)
        return innertest

    # initialize configuration (required for e.g. default modules_tool setting)
    cleanup()
    eb_go = eboptions.parse_options(args=['--prefix=%s' % TMPDIR])
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        # enable --force --module-only
        'force': True,
        'module_only': True,
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    config.set_tmpdir()

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [eb for eb in all_pys if os.path.basename(eb) != '__init__.py' and '/test/' not in eb]

    # filter out no longer supported easyblocks
    easyblocks = [e for e in easyblocks if os.path.basename(e) not in ['versionindependendpythonpackage.py']]

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to ModuleOnlyTest;
        # a closure factory (rather than 'exec') binds 'easyblock' correctly, and also works under
        # Python 3, where 'exec' in function scope does not create local variables
        innertest = make_inner_test(easyblock)
        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(easyblock.replace('.py', '').split('/'))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
def suite():
    """Return all easyblock initialisation tests."""
    def make_inner_test(easyblock, **kwargs):
        """Create test method for initialising the given easyblock, with optional extra arguments."""
        def innertest(self):
            template_init_test(self, easyblock, **kwargs)
        return innertest

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [eb for eb in all_pys if not eb.endswith('__init__.py') and not '/test/' in eb]

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to InitTest;
        # a closure factory (rather than 'exec') binds 'easyblock' correctly, and also works under
        # Python 3, where 'exec' in function scope does not create local variables
        if os.path.basename(easyblock) == 'systemcompiler.py':
            # use GCC as name when testing SystemCompiler easyblock
            innertest = make_inner_test(easyblock, name='GCC', version='system')
        else:
            innertest = make_inner_test(easyblock)

        innertest.__doc__ = "Test for initialisation of easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(easyblock.replace('.py', '').split('/'))
        setattr(InitTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(InitTest)
def suite():
    """Return all easyblock initialisation tests."""
    def make_inner_test(spec_path):
        """Create test method for parsing the easyconfig at the given path."""
        def innertest(self):
            template_easyconfig_test(self, spec_path)
        return innertest

    # dynamically generate a separate test for each of the available easyconfigs
    # define new inner functions that can be added as class methods to InitTest
    easyconfigs_path = get_paths_for('easyconfigs')[0]
    cnt = 0
    for (subpath, _, specs) in os.walk(easyconfigs_path, topdown=True):

        # ignore archived easyconfigs
        if '__archive__' in subpath:
            continue

        for spec in specs:
            if spec.endswith('.eb') and spec != 'TEMPLATE.eb':
                cnt += 1
                # use a closure factory rather than 'exec': it binds the spec path correctly,
                # and works under Python 3 where 'exec' in function scope can't create locals
                innertest = make_inner_test(os.path.join(subpath, spec))
                innertest.__doc__ = "Test for parsing of easyconfig %s" % spec
                # double underscore so parsing tests are run first
                innertest.__name__ = "test__parse_easyconfig_%s" % spec
                setattr(EasyConfigTest, innertest.__name__, innertest)

    # use print() function syntax, which works under both Python 2 and 3
    # (the original 'print' statement is a syntax error under Python 3)
    print("Found %s easyconfigs..." % cnt)

    return TestLoader().loadTestsFromTestCase(EasyConfigTest)
def basic_options(self):
    """basic runtime options"""
    all_stops = [x[0] for x in EasyBlock.get_steps()]
    strictness_options = [filetools.IGNORE, filetools.WARN, filetools.ERROR]
    try:
        default_robot_path = get_paths_for("easyconfigs", robot_path=None)[0]
    except Exception:
        # catch Exception rather than using a bare 'except', which would also
        # swallow SystemExit/KeyboardInterrupt
        self.log.warning("basic_options: unable to determine default easyconfig path")
        # False as opposed to None, since None is used for indicating that --robot was not used
        default_robot_path = False

    descr = ("Basic options", "Basic runtime options for EasyBuild.")

    opts = OrderedDict({
        "only-blocks": ("Only build listed blocks", None, "extend", None, "b", {'metavar': "BLOCKS"}),
        "force": (("Force to rebuild software even if it's already installed "
                   "(i.e. if it can be found as module)"),
                  None, "store_true", False, "f"),
        "job": ("Submit the build as a job", None, "store_true", False),
        "skip": ("Skip existing software (useful for installing additional packages)",
                 None, "store_true", False, "k"),
        "robot": ("Path to search for easyconfigs for missing dependencies.",
                  None, "store_or_None", default_robot_path, "r", {'metavar': "PATH"}),
        "stop": ("Stop the installation after certain step",
                 "choice", "store_or_None", "source", "s", all_stops),
        "strict": ("Set strictness level", "choice", "store", filetools.WARN, strictness_options),
        "logtostdout": ("Redirect main log to stdout", None, "store_true", False, "l"),
        "dry-run": ("Resolve dependencies and print build list, then stop", None, "store_true", False),
    })

    self.log.debug("basic_options: descr %s opts %s" % (descr, opts))
    self.add_group_parser(opts, descr)
def suite():
    """Return all easyblock --module-only tests."""
    def make_inner_test(easyblock, **kwargs):
        """Create test method running the --module-only test for the given easyblock."""
        def innertest(self):
            template_module_only_test(self, easyblock, **kwargs)
        return innertest

    # initialize configuration (required for e.g. default modules_tool setting)
    cleanup()
    eb_go = eboptions.parse_options(args=['--prefix=%s' % TMPDIR])
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'external_modules_metadata': {},
        # enable --force --module-only
        'force': True,
        'module_only': True,
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [eb for eb in all_pys if os.path.basename(eb) != '__init__.py' and '/test/' not in eb]

    # filter out no longer supported easyblocks, or easyblocks that are tested in a different way
    excluded_easyblocks = ['versionindependendpythonpackage.py']
    easyblocks = [e for e in easyblocks if os.path.basename(e) not in excluded_easyblocks]

    # add dummy PrgEnv-gnu/1.2.3 module, required for testing CrayToolchain easyblock
    write_file(os.path.join(TMPDIR, 'modules', 'all', 'PrgEnv-gnu', '1.2.3'), "#%Module")

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to ModuleOnlyTest;
        # a closure factory (rather than 'exec') binds arguments correctly, and also works under
        # Python 3, where 'exec' in function scope does not create local variables
        if os.path.basename(easyblock) == 'systemcompiler.py':
            # use GCC as name when testing SystemCompiler easyblock
            innertest = make_inner_test(easyblock, name='GCC', version='system')
        elif os.path.basename(easyblock) == 'craytoolchain.py':
            # make sure that a (known) PrgEnv is included as a dependency
            extra_txt = 'dependencies = [("PrgEnv-gnu/1.2.3", EXTERNAL_MODULE)]'
            innertest = make_inner_test(easyblock, extra_txt=extra_txt)
        else:
            innertest = make_inner_test(easyblock)

        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(easyblock.replace('.py', '').split('/'))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
class EasyConfigTest(TestCase): """Baseclass for easyconfig testcases.""" if LooseVersion(sys.version) >= LooseVersion('2.6'): os.environ['EASYBUILD_DEPRECATED'] = '2.0' # initialize configuration (required for e.g. default modules_tool setting) eb_go = eboptions.parse_options() config.init(eb_go.options, eb_go.get_options_by_section('config')) build_options = { 'check_osdeps': False, 'force': True, 'robot_path': get_paths_for("easyconfigs")[0], 'suffix_modules_path': GENERAL_CLASS, 'valid_module_classes': config.module_classes(), 'valid_stops': [x[0] for x in EasyBlock.get_steps()], } config.init_build_options(build_options=build_options) config.set_tmpdir() del eb_go log = fancylogger.getLogger("EasyConfigTest", fname=False) # make sure a logger is present for main main._log = log ordered_specs = None parsed_easyconfigs = [] def process_all_easyconfigs(self): """Process all easyconfigs and resolve inter-easyconfig dependencies.""" # all available easyconfig files easyconfigs_path = get_paths_for("easyconfigs")[0] specs = glob.glob('%s/*/*/*.eb' % easyconfigs_path) # parse all easyconfigs if they haven't been already if not self.parsed_easyconfigs: for spec in specs: self.parsed_easyconfigs.extend(process_easyconfig(spec)) self.ordered_specs = resolve_dependencies(self.parsed_easyconfigs) def test_dep_graph(self): """Unit test that builds a full dependency graph.""" # pygraph dependencies required for constructing dependency graph are not available prior to Python 2.6 if LooseVersion( sys.version) >= LooseVersion('2.6') and single_tests_ok: # temporary file for dep graph (hn, fn) = tempfile.mkstemp(suffix='.dot') os.close(hn) if self.ordered_specs is None: self.process_all_easyconfigs() dep_graph(fn, self.ordered_specs, silent=True) try: os.remove(fn) except OSError, err: log.error("Failed to remove %s: %s" % (fn, err)) else:
def process_all_easyconfigs(self):
    """Process all easyconfigs and resolve inter-easyconfig dependencies."""
    # collect all available easyconfig files
    spec_paths = glob.glob('%s/*/*/*.eb' % get_paths_for("easyconfigs")[0])

    # parse all easyconfigs, unless that was done already
    if not self.parsed_easyconfigs:
        for spec_path in spec_paths:
            for parsed_ec in process_easyconfig(spec_path):
                self.parsed_easyconfigs.append(parsed_ec)

    self.ordered_specs = resolve_dependencies(self.parsed_easyconfigs)
def process_all_easyconfigs(self):
    """Process all easyconfigs and resolve inter-easyconfig dependencies."""
    # location of all available easyconfig files
    easyconfigs_path = get_paths_for("easyconfigs")[0]

    # parse every available easyconfig (skipping validation)
    parsed_ecs = []
    for spec_path in glob.glob('%s/*/*/*.eb' % easyconfigs_path):
        parsed_ecs.extend(process_easyconfig(spec_path, validate=False))

    self.ordered_specs = resolve_dependencies(parsed_ecs, easyconfigs_path, force=True)
def suite():
    """Return all easyblock initialisation tests."""
    def make_inner_test(easyblock, **kwargs):
        def innertest(self):
            template_init_test(self, easyblock, **kwargs)
        return innertest

    # keyword arguments for easyblocks that need special treatment when being initialised
    special_kwargs = {
        # use GCC as name when testing SystemCompiler easyblock
        'systemcompiler.py': {'name': 'GCC', 'version': 'system'},
        # use OpenMPI as name when testing SystemMPI easyblock
        'systemmpi.py': {'name': 'OpenMPI', 'version': 'system'},
        # custom easyblock for intel-compilers (oneAPI) requires v2021.x or newer
        'intel_compilers.py': {'name': 'intel-compilers', 'version': '2021.1'},
        # custom easyblock for OpenFOAM requires non-system toolchain
        'openfoam.py': {'toolchain': {'name': 'foss', 'version': '2021a'}},
        # easyblock to create OpenSSL wrapper expects an OpenSSL version
        'openssl_wrapper.py': {'version': '1.1'},
    }

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    easyblocks = [
        path for path in glob.glob('%s/*/*.py' % easyblocks_path)
        if not path.endswith('__init__.py') and '/test/' not in path
    ]

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to InitTest
        kwargs = special_kwargs.get(os.path.basename(easyblock), {})
        innertest = make_inner_test(easyblock, **kwargs)

        innertest.__doc__ = "Test for initialisation of easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(easyblock.replace('.py', '').split('/'))
        setattr(InitTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(InitTest)
def test_style_conformance(self):
    """Check the easyconfigs for style"""
    # all available easyconfig files, sorted for deterministic output
    easyconfigs_path = get_paths_for("easyconfigs")[0]
    spec_paths = sorted(glob.glob('%s/*/*/*.eb' % easyconfigs_path))

    error_cnt = check_easyconfigs_style(spec_paths)
    self.assertEqual(error_cnt, 0, "Found code style errors (and/or warnings): %s" % error_cnt)
def suite():
    """Return all easyblock --module-only tests."""
    def make_inner_test(easyblock, **kwargs):
        """Create test method running the --module-only test for the given easyblock."""
        def innertest(self):
            template_module_only_test(self, easyblock, **kwargs)
        return innertest

    # initialize configuration (required for e.g. default modules_tool setting)
    cleanup()
    eb_go = eboptions.parse_options(args=['--prefix=%s' % TMPDIR])
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'external_modules_metadata': {},
        # enable --force --module-only
        'force': True,
        'module_only': True,
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [eb for eb in all_pys if os.path.basename(eb) != '__init__.py' and '/test/' not in eb]

    # filter out no longer supported easyblocks, or easyblocks that are tested in a different way
    excluded_easyblocks = ['versionindependendpythonpackage.py']
    easyblocks = [e for e in easyblocks if os.path.basename(e) not in excluded_easyblocks]

    # add dummy PrgEnv-* modules, required for testing CrayToolchain easyblock
    for prgenv in ['PrgEnv-cray', 'PrgEnv-gnu', 'PrgEnv-intel', 'PrgEnv-pgi']:
        write_file(os.path.join(TMPDIR, 'modules', 'all', prgenv, '1.2.3'), "#%Module")

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to ModuleOnlyTest;
        # a closure factory (rather than 'exec') binds arguments correctly, and also works under
        # Python 3, where 'exec' in function scope does not create local variables
        if os.path.basename(easyblock) == 'systemcompiler.py':
            # use GCC as name when testing SystemCompiler easyblock
            innertest = make_inner_test(easyblock, name='GCC', version='system')
        elif os.path.basename(easyblock) == 'systemmpi.py':
            # use OpenMPI as name when testing SystemMPI easyblock
            innertest = make_inner_test(easyblock, name='OpenMPI', version='system')
        elif os.path.basename(easyblock) == 'craytoolchain.py':
            # make sure that a (known) PrgEnv is included as a dependency
            extra_txt = 'dependencies = [("PrgEnv-gnu/1.2.3", EXTERNAL_MODULE)]'
            innertest = make_inner_test(easyblock, extra_txt=extra_txt)
        else:
            innertest = make_inner_test(easyblock)

        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(easyblock.replace('.py', '').split('/'))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
def test_style_conformance(self):
    """Check the easyconfigs for style"""
    if 'pep8' not in sys.modules:
        # use print() function syntax, which works under both Python 2 and 3
        # (the 'print' statement is a syntax error under Python 3)
        print("Skipping style checks (no pep8 available)")
        return

    # all available easyconfig files
    easyconfigs_path = get_paths_for("easyconfigs")[0]
    specs = glob.glob('%s/*/*/*.eb' % easyconfigs_path)
    specs = sorted(specs)

    result = check_easyconfigs_style(specs)
    self.assertEqual(result, 0, "Found code style errors (and/or warnings): %s" % result)
def suite():
    """Return all easyblock initialisation tests."""
    def make_inner_test(spec_path):
        """Create test method for parsing the easyconfig at the given path."""
        def innertest(self):
            template_easyconfig_test(self, spec_path)
        return innertest

    # dynamically generate a separate test for each of the available easyblocks
    easyconfigs_path = get_paths_for("easyconfigs")[0]
    specs = glob.glob('%s/*/*/*.eb' % easyconfigs_path)

    for spec in specs:
        # dynamically define new inner functions that can be added as class methods to InitTest;
        # a closure factory (rather than 'exec') binds the spec path correctly, and also works
        # under Python 3, where 'exec' in function scope does not create local variables
        innertest = make_inner_test(spec)
        spec = os.path.basename(spec)
        innertest.__doc__ = "Test for parsing of easyconfig %s" % spec
        # double underscore so parsing tests are run first
        innertest.__name__ = "test__parse_easyconfig_%s" % spec
        setattr(EasyConfigTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(EasyConfigTest)
def process_all_easyconfigs(self):
    """Process all easyconfigs and resolve inter-easyconfig dependencies."""
    # location of all available easyconfig files
    easyconfigs_path = get_paths_for("easyconfigs")[0]

    # parse every available easyconfig (skipping validation)
    parsed_ecs = []
    for spec_path in glob.glob('%s/*/*/*.eb' % easyconfigs_path):
        parsed_ecs.extend(process_easyconfig(spec_path, build_options={'validate': False}))

    # resolve dependencies across all parsed easyconfigs
    resolve_opts = {
        'robot_path': easyconfigs_path,
        'force': True,
    }
    self.ordered_specs = resolve_dependencies(parsed_ecs, build_options=resolve_opts)
def test_style_conformance(self):
    """Check the easyconfigs for style"""
    if 'pep8' not in sys.modules:
        # no point in running style checks when pep8 is not available
        print("Skipping style checks (no pep8 available)")
        return

    # all available easyconfig files, sorted for deterministic output
    easyconfigs_path = get_paths_for("easyconfigs")[0]
    spec_paths = sorted(glob.glob('%s/*/*/*.eb' % easyconfigs_path))

    error_cnt = check_easyconfigs_style(spec_paths)
    self.assertEqual(error_cnt, 0, "Found code style errors (and/or warnings): %s" % error_cnt)
def suite():
    """Return all easyblock initialisation tests."""
    def make_inner_test(easyblock):
        """Create test method for initialising the given easyblock."""
        def innertest(self):
            template_init_test(self, easyblock)
        return innertest

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [eb for eb in all_pys if not eb.endswith('__init__.py') and not '/test/' in eb]

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to InitTest;
        # a closure factory (rather than 'exec') binds 'easyblock' correctly, and also works under
        # Python 3, where 'exec' in function scope does not create local variables
        innertest = make_inner_test(easyblock)
        innertest.__doc__ = "Test for initialisation of easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(easyblock.replace('.py', '').split('/'))
        setattr(InitTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(InitTest)
def suite():
    """Return all easyblock initialisation tests."""
    def make_inner_test(spec_path):
        """Create test method for parsing the easyconfig at the given path."""
        def innertest(self):
            template_easyconfig_test(self, spec_path)
        return innertest

    # dynamically generate a separate test for each of the available easyconfigs
    # define new inner functions that can be added as class methods to InitTest
    easyconfigs_path = get_paths_for('easyconfigs')[0]
    cnt = 0
    for (subpath, _, specs) in os.walk(easyconfigs_path, topdown=True):
        for spec in specs:
            if spec.endswith('.eb') and spec != 'TEMPLATE.eb':
                cnt += 1
                # use a closure factory rather than 'exec': it binds the spec path correctly,
                # and works under Python 3 where 'exec' in function scope can't create locals
                innertest = make_inner_test(os.path.join(subpath, spec))
                innertest.__doc__ = "Test for parsing of easyconfig %s" % spec
                # double underscore so parsing tests are run first
                innertest.__name__ = "test__parse_easyconfig_%s" % spec
                setattr(EasyConfigTest, innertest.__name__, innertest)

    # use print() function syntax, which works under both Python 2 and 3
    # (the original 'print' statement is a syntax error under Python 3)
    print("Found %s easyconfigs..." % cnt)

    return TestLoader().loadTestsFromTestCase(EasyConfigTest)
def process_all_easyconfigs(self):
    """Process all easyconfigs and resolve inter-easyconfig dependencies."""
    # collect all available easyconfig files
    spec_paths = glob.glob('%s/*/*/*.eb' % get_paths_for("easyconfigs")[0])

    # parse all easyconfigs, unless that was done already
    if not self.parsed_easyconfigs:
        for spec_path in spec_paths:
            self.parsed_easyconfigs.extend(process_easyconfig(spec_path))

    # filter out external modules
    for parsed_ec in self.parsed_easyconfigs:
        deps = parsed_ec['dependencies']
        # iterate over a copy, since entries may be removed while looping
        for dep in list(deps):
            if dep.get('external_module', False):
                deps.remove(dep)

    self.ordered_specs = resolve_dependencies(self.parsed_easyconfigs, modules_tool(), retain_all_deps=True)
def process_all_easyconfigs(self):
    """Process all easyconfigs and resolve inter-easyconfig dependencies."""
    # location of all available easyconfig files
    easyconfigs_path = get_paths_for("easyconfigs")[0]

    # parse every available easyconfig, without validating
    parsed_ecs = []
    for spec_path in glob.glob('%s/*/*/*.eb' % easyconfigs_path):
        parsed_ecs.extend(process_easyconfig(spec_path, build_options={'validate': False}))

    # resolve dependencies across all parsed easyconfigs
    resolve_opts = {
        'robot_path': easyconfigs_path,
        'force': True,
    }
    self.ordered_specs = resolve_dependencies(parsed_ecs, build_options=resolve_opts)
def test_changed_files_pull_request(self):
    """Specific checks only done for the (easyconfig) files that were changed in a pull request."""
    # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
    if re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST', '(none)')):

        # target branch should be anything other than 'master';
        # usually is 'develop', but could also be a release branch like '3.7.x'
        travis_branch = os.environ.get('TRAVIS_BRANCH', None)
        if travis_branch and travis_branch != 'master':

            if not self.parsed_easyconfigs:
                self.process_all_easyconfigs()

            # relocate to top-level directory of repository to run 'git diff' command
            top_dir = os.path.dirname(os.path.dirname(get_paths_for('easyconfigs')[0]))
            cwd = change_dir(top_dir)

            # get list of changed easyconfigs
            cmd = "git diff --name-only --diff-filter=AM %s...HEAD" % travis_branch
            out, ec = run_cmd(cmd, simple=False)
            changed_ecs_filenames = [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]
            print("List of changed easyconfig files in this PR: %s" % changed_ecs_filenames)

            change_dir(cwd)

            # grab parsed easyconfigs for changed easyconfig files
            changed_ecs = []
            for ec_fn in changed_ecs_filenames:
                for parsed_ec in self.parsed_easyconfigs:
                    # compare full filenames: a plain endswith() check would also wrongly match
                    # e.g. 'zlib-1.2.8-GCC-4.8.2.eb' when looking for 'GCC-4.8.2.eb'
                    if os.path.basename(parsed_ec['spec']) == ec_fn:
                        changed_ecs.append(parsed_ec['ec'])
                        break

            # run checks on changed easyconfigs
            self.check_sha256_checksums(changed_ecs)
def suite():
    """Return all easyblock --module-only tests."""
    def make_inner_test(easyblock):
        """Create test method running the --module-only test for the given easyblock."""
        def innertest(self):
            template_module_only_test(self, easyblock)
        return innertest

    # initialize configuration (required for e.g. default modules_tool setting)
    cleanup()
    eb_go = eboptions.parse_options(args=['--prefix=%s' % TMPDIR])
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        # enable --force --module-only
        'force': True,
        'module_only': True,
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    config.set_tmpdir()

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [eb for eb in all_pys if os.path.basename(eb) != '__init__.py' and '/test/' not in eb]

    # filter out no longer supported easyblocks
    easyblocks = [e for e in easyblocks if os.path.basename(e) not in ['versionindependendpythonpackage.py']]

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to ModuleOnlyTest;
        # a closure factory (rather than 'exec') binds 'easyblock' correctly, and also works under
        # Python 3, where 'exec' in function scope does not create local variables
        innertest = make_inner_test(easyblock)
        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(easyblock.replace('.py', '').split('/'))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
def test_style_conformance(self):
    """Check the easyconfigs for style"""
    # all available easyconfig files, sorted for deterministic output
    easyconfigs_path = get_paths_for("easyconfigs")[0]
    spec_paths = sorted(glob.glob('%s/*/*/*.eb' % easyconfigs_path))

    # capture output produced while running the style check
    self.mock_stderr(True)
    self.mock_stdout(True)
    error_cnt = check_easyconfigs_style(spec_paths)
    stderr, stdout = self.get_stderr(), self.get_stdout()
    self.mock_stderr(False)
    self.mock_stdout(False)

    # include captured output in the failure message, so problems are easy to diagnose
    error_msg = '\n'.join([
        "There shouldn't be any code style errors (and/or warnings), found %d:" % error_cnt,
        stdout,
        stderr,
    ])
    self.assertEqual(error_cnt, 0, error_msg)
def __init__(self, *args, **kwargs):
    """Constructor."""
    self.default_robot_paths = get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR, robot_path=None) or []

    # constants to seed into the config files parser, grouped per section
    self.go_cfg_constants = {
        self.DEFAULTSECT: {
            'DEFAULT_ROBOT_PATHS': (os.pathsep.join(self.default_robot_paths),
                                    "List of default robot paths ('%s'-separated)" % os.pathsep),
        }
    }

    # update or define go_configfiles_initenv in named arguments to pass to parent constructor
    go_cfg_initenv = kwargs.setdefault('go_configfiles_initenv', {})
    for section_name, section_constants in self.go_cfg_constants.items():
        # strip off the help text, keeping only name -> value mappings
        name_to_value = dict((name, value) for (name, (value, _)) in section_constants.items())
        go_cfg_initenv.setdefault(section_name, {}).update(name_to_value)

    super(EasyBuildOptions, self).__init__(*args, **kwargs)
def suite():
    """Return all easyblock --module-only tests.

    Initialises the EasyBuild configuration, then dynamically attaches one
    test method per available easyblock to ModuleOnlyTest and returns the
    resulting test suite.
    """
    # initialize configuration (required for e.g. default modules_tool setting)
    cleanup()
    eb_go = eboptions.parse_options(args=["--prefix=%s" % TMPDIR])
    config.init(eb_go.options, eb_go.get_options_by_section("config"))
    build_options = {
        # enable --force --module-only
        "force": True,
        "module_only": True,
        "silent": True,
        "suffix_modules_path": GENERAL_CLASS,
        "valid_module_classes": config.module_classes(),
        "valid_stops": [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    config.set_tmpdir()

    def make_inner_test(easyblock_path, **template_kwargs):
        """Create a test method for one easyblock.

        A closure with the path (and optional name/version overrides) bound as
        arguments replaces the previous exec()-based definition, which breaks
        under Python 3 (exec no longer creates local names) and embedded the
        raw path into source text (quoting/injection hazard).
        """
        def innertest(self):
            template_module_only_test(self, easyblock_path, **template_kwargs)
        return innertest

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob("%s/*/*.py" % easyblocks_path)
    easyblocks = [eb for eb in all_pys if os.path.basename(eb) != "__init__.py" and "/test/" not in eb]

    # filter out no longer supported easyblocks, or easyblocks that are tested in a different way
    excluded_easyblocks = ["versionindependendpythonpackage.py"]
    easyblocks = [e for e in easyblocks if os.path.basename(e) not in excluded_easyblocks]

    for easyblock in easyblocks:
        if os.path.basename(easyblock) == "systemcompiler.py":
            # use GCC as name when testing SystemCompiler easyblock
            innertest = make_inner_test(easyblock, name="GCC", version="system")
        else:
            innertest = make_inner_test(easyblock)
        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        # derive a unique, valid method name from the easyblock's path
        innertest.__name__ = "test_easyblock_%s" % "_".join(easyblock.replace(".py", "").split("/"))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
def test_changed_files_pull_request(self):
    """Specific checks only done for the (easyconfig) files that were changed in a pull request."""
    # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
    if re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST', '(none)')):

        # target branch should be anything other than 'master';
        # usually is 'develop', but could also be a release branch like '3.7.x'
        travis_branch = os.environ.get('TRAVIS_BRANCH', None)
        if travis_branch and travis_branch != 'master':

            if not self.parsed_easyconfigs:
                self.process_all_easyconfigs()

            # relocate to top-level directory of repository to run 'git diff' command
            top_dir = os.path.dirname(os.path.dirname(get_paths_for('easyconfigs')[0]))
            cwd = change_dir(top_dir)

            # get list of changed easyconfigs
            cmd = "git diff --name-only --diff-filter=AM %s...HEAD" % travis_branch
            out, ec = run_cmd(cmd, simple=False)
            changed_ecs_filenames = [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]
            print("List of changed easyconfig files in this PR: %s" % changed_ecs_filenames)

            change_dir(cwd)

            # grab parsed easyconfigs for changed easyconfig files
            changed_ecs = []
            for ec_fn in changed_ecs_filenames:
                match = None
                for parsed_ec in self.parsed_easyconfigs:
                    # compare exact basenames: a plain endswith() check can false-match,
                    # e.g. '.../bzlib-1.2.8.eb'.endswith('zlib-1.2.8.eb') is True
                    if os.path.basename(parsed_ec['spec']) == ec_fn:
                        match = parsed_ec['ec']
                        break

                if match is None:
                    # fail loudly rather than silently skipping checks for this changed file
                    self.assertTrue(False, "Failed to find parsed easyconfig for %s" % ec_fn)
                else:
                    changed_ecs.append(match)

            # run checks on changed easyconfigs
            self.check_sha256_checksums(changed_ecs)
def suite():
    """Return all easyblock initialisation tests.

    Dynamically attaches one initialisation test method per available
    easyblock to InitTest and returns the resulting test suite.
    """

    def make_inner_test(easyblock_path, **template_kwargs):
        """Create a test method for one easyblock.

        A closure with the path (and optional name/version overrides) bound as
        arguments replaces the previous exec()-based definition, which breaks
        under Python 3 (exec no longer creates local names) and embedded the
        raw path into source text (quoting/injection hazard).
        """
        def innertest(self):
            template_init_test(self, easyblock_path, **template_kwargs)
        return innertest

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [eb for eb in all_pys if not eb.endswith('__init__.py') and not '/test/' in eb]

    for easyblock in easyblocks:
        if os.path.basename(easyblock) == 'systemcompiler.py':
            # use GCC as name when testing SystemCompiler easyblock
            innertest = make_inner_test(easyblock, name='GCC', version='system')
        elif os.path.basename(easyblock) == 'systemmpi.py':
            # use OpenMPI as name when testing SystemMPI easyblock
            innertest = make_inner_test(easyblock, name='OpenMPI', version='system')
        else:
            innertest = make_inner_test(easyblock)
        innertest.__doc__ = "Test for initialisation of easyblock %s" % easyblock
        # derive a unique, valid method name from the easyblock's path
        innertest.__name__ = "test_easyblock_%s" % '_'.join(easyblock.replace('.py', '').split('/'))
        setattr(InitTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(InitTest)
def main(testing_data=(None, None)):
    """
    Main function:
    @arg options: a tuple: (options, paths, logger, logfile, hn) as defined in parse_options
    This function will:
    - read easyconfig
    - build software
    """
    # NOTE: this is Python 2 code (has_key, 'except IOError, err' syntax below)

    # disallow running EasyBuild as root
    if os.getuid() == 0:
        sys.stderr.write("ERROR: You seem to be running EasyBuild with root privileges.\n" \
                         "That's not wise, so let's end this here.\n" \
                         "Exiting.\n")
        sys.exit(1)

    # steer behavior when testing main: testing mode is on iff args were supplied
    testing = testing_data[0] is not None
    args, logfile = testing_data

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # initialise logging for main: either to screen, or to a (possibly temporary) log file
    if options.logtostdout:
        fancylogger.logToScreen(enable=True, stdout=True)
    else:
        if logfile is None:
            # mkstemp returns (fd,filename), fd is from os.open, not regular open!
            fd, logfile = tempfile.mkstemp(suffix='.log', prefix='easybuild-')
            os.close(fd)

        fancylogger.logToFile(logfile)
        print_msg('temporary log file in case of crash %s' % (logfile), log=None, silent=testing)

    global _log
    _log = fancylogger.getLogger(fname=False)

    # hello world!
    _log.info(this_is_easybuild())

    # set strictness of filetools module
    if options.strict:
        filetools.strictness = options.strict

    # log robot path setting; an empty-but-not-None robot value means no path could be determined
    if not options.robot is None:
        if options.robot:
            _log.info("Using robot path: %s" % options.robot)
        else:
            _log.error("No robot path specified, and unable to determine easybuild-easyconfigs install path.")

    # determine easybuild-easyconfigs package install path
    easyconfigs_paths = get_paths_for("easyconfigs", robot_path=options.robot)
    easyconfigs_pkg_full_path = None

    # default search path is the current directory, unless a better one is found below
    search_path = os.getcwd()
    if easyconfigs_paths:
        easyconfigs_pkg_full_path = easyconfigs_paths[0]
        if not options.robot:
            search_path = easyconfigs_pkg_full_path
        else:
            search_path = options.robot
    else:
        _log.info("Failed to determine install path for easybuild-easyconfigs package.")

    # the robot path (if any) takes precedence in the list of easyconfig paths
    if options.robot:
        easyconfigs_paths = [options.robot] + easyconfigs_paths

    # initialise the easybuild configuration
    config.init(options, eb_go.get_options_by_section('config'))

    # search for easyconfigs
    if options.search:
        search_file(search_path, options.search, silent=testing)

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, software_build_specs) = process_software_build_specs(options)

    paths = []
    if len(orig_paths) == 0:
        # no paths given on the command line: derive them from build specs, or complain
        if software_build_specs.has_key('name'):
            paths = [obtain_path(software_build_specs, easyconfigs_paths,
                                 try_to_generate=try_to_generate, exit_on_error=not testing)]
        elif not any([options.aggregate_regtest, options.search, options.regtest]):
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    else:
        # look for easyconfigs with relative paths in easybuild-easyconfigs package,
        # unless they are found at the given relative paths
        if easyconfigs_pkg_full_path:
            # create a mapping from filename to path in easybuild-easyconfigs package install path
            easyconfigs_map = {}
            for (subpath, _, filenames) in os.walk(easyconfigs_pkg_full_path):
                for filename in filenames:
                    easyconfigs_map.update({filename: os.path.join(subpath, filename)})

            # try and find non-existing non-absolute easyconfig paths in easybuild-easyconfigs package install path
            for idx, orig_path in enumerate(orig_paths):
                if not os.path.isabs(orig_path) and not os.path.exists(orig_path):
                    if orig_path in easyconfigs_map:
                        _log.info("Found %s in %s: %s" % (orig_path, easyconfigs_pkg_full_path,
                                                          easyconfigs_map[orig_path]))
                        orig_paths[idx] = easyconfigs_map[orig_path]

        # indicate that specified paths do not contain generated easyconfig files
        paths = [(path, False) for path in orig_paths]

    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        if paths:
            regtest_ok = regtest(options, [path[0] for path in paths])
        else:
            # fallback: easybuild-easyconfigs install path
            regtest_ok = regtest(options, [easyconfigs_pkg_full_path])

        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # search/regtest-only invocations stop here
    if any([options.search, options.regtest]):
        cleanup_logfile_and_exit(logfile, testing, True)

    # building a dependency graph implies force, so that all dependencies are retained
    # and also skips validation of easyconfigs (e.g. checking os dependencies)
    validate_easyconfigs = True
    retain_all_deps = False
    if options.dep_graph:
        _log.info("Enabling force to generate dependency graph.")
        options.force = True
        validate_easyconfigs = False
        retain_all_deps = True

    # read easyconfig files; each (path, generated) tuple marks whether the path holds generated easyconfigs
    easyconfigs = []
    for (path, generated) in paths:
        path = os.path.abspath(path)
        if not (os.path.exists(path)):
            print_error("Can't find path %s" % path)

        try:
            files = find_easyconfigs(path)
            for f in files:
                # tweak easyconfig on the fly when build specs were given and the file wasn't generated
                if not generated and try_to_generate and software_build_specs:
                    ec_file = easyconfig.tools.tweak(f, None, software_build_specs)
                else:
                    ec_file = f
                easyconfigs.extend(process_easyconfig(ec_file, options.only_blocks,
                                                      validate=validate_easyconfigs))
        except IOError, err:
            _log.error("Processing easyconfigs in path %s failed: %s" % (path, err))
class EasyConfigTest(TestCase):
    """Baseclass for easyconfig testcases."""
    # NOTE: class body runs at import time: it initialises the EasyBuild configuration
    # and build options once, shared by all test methods (Python 2 code: print statements below)

    # initialize configuration (required for e.g. default modules_tool setting)
    eb_go = eboptions.parse_options()
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'check_osdeps': False,
        'external_modules_metadata': {},
        'force': True,
        'optarch': 'test',
        'robot_path': get_paths_for("easyconfigs")[0],
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()
    del eb_go

    # put dummy 'craype-test' module in place, which is required for parsing easyconfigs using Cray* toolchains
    TMPDIR = tempfile.mkdtemp()
    os.environ['MODULEPATH'] = TMPDIR
    write_file(os.path.join(TMPDIR, 'craype-test'), '#%Module\n')

    log = fancylogger.getLogger("EasyConfigTest", fname=False)

    # make sure a logger is present for main
    eb_main._log = log
    # ordered_specs: dependency-resolved list of easyconfigs (filled lazily)
    ordered_specs = None
    # parsed_easyconfigs: cache of all parsed easyconfigs, shared across tests
    parsed_easyconfigs = []

    def process_all_easyconfigs(self):
        """Process all easyconfigs and resolve inter-easyconfig dependencies."""
        # all available easyconfig files
        easyconfigs_path = get_paths_for("easyconfigs")[0]
        specs = glob.glob('%s/*/*/*.eb' % easyconfigs_path)

        # parse all easyconfigs if they haven't been already
        if not self.parsed_easyconfigs:
            for spec in specs:
                self.parsed_easyconfigs.extend(process_easyconfig(spec))

        # filter out external modules (iterate over a copy, since we mutate the list)
        for ec in self.parsed_easyconfigs:
            for dep in ec['dependencies'][:]:
                if dep.get('external_module', False):
                    ec['dependencies'].remove(dep)

        self.ordered_specs = resolve_dependencies(self.parsed_easyconfigs, modules_tool(), retain_all_deps=True)

    def test_dep_graph(self):
        """Unit test that builds a full dependency graph."""
        # pygraph dependencies required for constructing dependency graph are not available prior to Python 2.6
        if LooseVersion(sys.version) >= LooseVersion('2.6') and single_tests_ok:
            # temporary file for dep graph
            (hn, fn) = tempfile.mkstemp(suffix='.dot')
            os.close(hn)

            if self.ordered_specs is None:
                self.process_all_easyconfigs()

            dep_graph(fn, self.ordered_specs)

            remove_file(fn)
        else:
            print "(skipped dep graph test)"

    def test_conflicts(self):
        """Check whether any conflicts occur in software dependency graphs."""
        if not single_tests_ok:
            print "(skipped conflicts test)"
            return

        if self.ordered_specs is None:
            self.process_all_easyconfigs()

        # NOTE(review): the message describes the *expected* state, shown only on failure
        self.assertFalse(check_conflicts(self.ordered_specs, modules_tool(), check_inter_ec_conflicts=False),
                         "No conflicts detected")

    def test_dep_versions_per_toolchain_generation(self):
        """
        Check whether there's only one dependency version per toolchain generation actively used.
        This is enforced to try and limit the chance of running into conflicts when multiple modules built with
        the same toolchain are loaded together.
        """
        if self.ordered_specs is None:
            self.process_all_easyconfigs()

        def get_deps_for(ec):
            """Get list of (direct) dependencies for specified easyconfig."""
            # recursively collects (name, version, versionsuffix, full module name) tuples
            deps = []
            for dep in ec['ec']['dependencies']:
                dep_mod_name = dep['full_mod_name']
                deps.append((dep['name'], dep['version'], dep['versionsuffix'], dep_mod_name))
                res = [x for x in self.ordered_specs if x['full_mod_name'] == dep_mod_name]
                if len(res) == 1:
                    deps.extend(get_deps_for(res[0]))
                else:
                    raise EasyBuildError("Failed to find %s in ordered list of easyconfigs", dep_mod_name)

            return deps

        def check_dep_vars(dep, dep_vars):
            """Check whether available variants of a particular dependency are acceptable or not.

            dep_vars maps a variant key ("version: ...; versionsuffix: ...") to the set of
            easyconfig filenames that pull in that variant; known-acceptable variants are
            filtered out below, and the remainder must boil down to a single variant (or an
            allowed Python 2.x/3.x pair) for the check to pass.
            """
            # 'guilty' until proven 'innocent'
            res = False

            # filter out binutils with empty versionsuffix which is used to build toolchain compiler
            if dep == 'binutils' and len(dep_vars) > 1:
                empty_vsuff_vars = [v for v in dep_vars.keys() if v.endswith('versionsuffix: ')]
                if len(empty_vsuff_vars) == 1:
                    dep_vars = dict((k, v) for (k, v) in dep_vars.items() if k != empty_vsuff_vars[0])

            # multiple variants of HTSlib is OK as long as they are deps for a matching version of BCFtools
            elif dep == 'HTSlib' and len(dep_vars) > 1:
                for key, ecs in dep_vars.items():
                    # filter out HTSlib variants that are only used as dependency for BCFtools with same version
                    htslib_ver = re.search('^version: (?P<ver>[^;]+);', key).group('ver')
                    if all(ec.startswith('BCFtools-%s-' % htslib_ver) for ec in ecs):
                        dep_vars.pop(key)

            # filter out FFTW and imkl with -serial versionsuffix which are used in non-MPI subtoolchains
            elif dep in ['FFTW', 'imkl']:
                serial_vsuff_vars = [v for v in dep_vars.keys() if v.endswith('versionsuffix: -serial')]
                if len(serial_vsuff_vars) == 1:
                    dep_vars = dict((k, v) for (k, v) in dep_vars.items() if k != serial_vsuff_vars[0])

            # for some dependencies, we allow exceptions for software that depends on a particular version,
            # as long as that's indicated by the versionsuffix
            elif dep in ['Boost', 'R', 'PLUMED'] and len(dep_vars) > 1:
                for key in dep_vars.keys():
                    dep_ver = re.search('^version: (?P<ver>[^;]+);', key).group('ver')
                    # filter out dep version if all easyconfig filenames using it include specific dep version
                    if all(re.search('-%s-%s' % (dep, dep_ver), v) for v in dep_vars[key]):
                        dep_vars.pop(key)
                    # always retain at least one dep variant
                    if len(dep_vars) == 1:
                        break

            # filter R dep for a specific version of Python 2.x
            # NOTE(review): this is a new 'if' (not elif), so it applies on top of the filtering above
            if dep == 'R' and len(dep_vars) > 1:
                for key in dep_vars.keys():
                    if '; versionsuffix: -Python-2' in key:
                        dep_vars.pop(key)
                    # always retain at least one variant
                    if len(dep_vars) == 1:
                        break

            # filter out Java 'wrapper'
            # i.e. if the version of one is a prefix of the version of the other one (e.g. 1.8 & 1.8.0_181)
            elif dep == 'Java' and len(dep_vars) == 2:
                key1, key2 = sorted(dep_vars.keys())
                ver1, ver2 = [k.split(';')[0] for k in [key1, key2]]
                if ver1.startswith(ver2):
                    dep_vars.pop(key2)
                elif ver2.startswith(ver1):
                    dep_vars.pop(key1)

            # filter out variants that are specific to a particular version of CUDA,
            # but only if some non-CUDA variants remain
            cuda_dep_vars = [v for v in dep_vars.keys() if '-CUDA' in v]
            if len(dep_vars) > len(cuda_dep_vars):
                for key in dep_vars.keys():
                    if re.search('; versionsuffix: .*-CUDA-[0-9.]+', key):
                        dep_vars.pop(key)

            # some software packages require an old version of a particular dependency
            old_dep_versions = {
                # libxc (CP2K & ABINIT require libxc 2.x or 3.x)
                'libxc': r'[23]\.',
                # OPERA requires SAMtools 0.x
                'SAMtools': r'0\.',
                # Kraken 1.0 requires Jellyfish 1.x
                'Jellyfish': r'1\.',
            }
            if dep in old_dep_versions and len(dep_vars) > 1:
                for key in dep_vars.keys():
                    # filter out known old dependency versions
                    if re.search('^version: %s' % old_dep_versions[dep], key):
                        dep_vars.pop(key)

            # only single variant is always OK
            if len(dep_vars) == 1:
                res = True

            elif len(dep_vars) == 2 and dep in ['Python', 'Tkinter']:
                # for Python & Tkinter, it's OK to have on 2.x and one 3.x version
                v2_dep_vars = [x for x in dep_vars.keys() if x.startswith('version: 2.')]
                v3_dep_vars = [x for x in dep_vars.keys() if x.startswith('version: 3.')]
                if len(v2_dep_vars) == 1 and len(v3_dep_vars) == 1:
                    res = True

            # two variants is OK if one is for Python 2.x and the other is for Python 3.x (based on versionsuffix)
            elif len(dep_vars) == 2:
                py2_dep_vars = [x for x in dep_vars.keys() if '; versionsuffix: -Python-2.' in x]
                py3_dep_vars = [x for x in dep_vars.keys() if '; versionsuffix: -Python-3.' in x]
                if len(py2_dep_vars) == 1 and len(py3_dep_vars) == 1:
                    res = True

            return res

        # some software also follows <year>{a,b} versioning scheme,
        # which throws off the pattern matching done below for toolchain versions
        false_positives_regex = re.compile('^MATLAB-Engine-20[0-9][0-9][ab]')

        # restrict to checking dependencies of easyconfigs using common toolchains (start with 2018a)
        # and GCCcore subtoolchain for common toolchains, starting with GCCcore 7.x
        for pattern in ['201[89][ab]', '20[2-9][0-9][ab]', 'GCCcore-[7-9]\.[0-9]']:
            all_deps = {}
            regex = re.compile('^.*-(?P<tc_gen>%s).*\.eb$' % pattern)

            # collect variants for all dependencies of easyconfigs that use a toolchain that matches
            for ec in self.ordered_specs:
                ec_file = os.path.basename(ec['spec'])
                # take into account software which also follows a <year>{a,b} versioning scheme
                ec_file = false_positives_regex.sub('', ec_file)
                res = regex.match(ec_file)
                if res:
                    tc_gen = res.group('tc_gen')
                    all_deps_tc_gen = all_deps.setdefault(tc_gen, {})
                    for dep_name, dep_ver, dep_versuff, dep_mod_name in get_deps_for(ec):
                        dep_variants = all_deps_tc_gen.setdefault(dep_name, {})
                        # a variant is defined by version + versionsuffix
                        variant = "version: %s; versionsuffix: %s" % (dep_ver, dep_versuff)
                        # keep track of which easyconfig this is a dependency
                        dep_variants.setdefault(variant, set()).add(ec_file)

            # check which dependencies have more than 1 variant
            multi_dep_vars, multi_dep_vars_msg = [], ''
            for tc_gen in sorted(all_deps.keys()):
                for dep in sorted(all_deps[tc_gen].keys()):
                    dep_vars = all_deps[tc_gen][dep]
                    if not check_dep_vars(dep, dep_vars):
                        multi_dep_vars.append(dep)
                        multi_dep_vars_msg += "\nfound %s variants of '%s' dependency " % (len(dep_vars), dep)
                        multi_dep_vars_msg += "in easyconfigs using '%s' toolchain generation\n* " % tc_gen
                        multi_dep_vars_msg += '\n* '.join("%s as dep for %s" % v for v in sorted(dep_vars.items()))
                        multi_dep_vars_msg += '\n'

            # NOTE(review): message describes the expected state (shown only on failure)
            error_msg = "No multi-variant deps found for '%s' easyconfigs:\n%s" % (regex.pattern, multi_dep_vars_msg)
            self.assertFalse(multi_dep_vars, error_msg)

    def test_sanity_check_paths(self):
        """Make sure specified sanity check paths adher to the requirements."""
        if self.ordered_specs is None:
            self.process_all_easyconfigs()

        for ec in self.parsed_easyconfigs:
            ec_scp = ec['ec']['sanity_check_paths']
            if ec_scp != {}:
                # if sanity_check_paths is specified (i.e., non-default), it must adher to the requirements:
                # both 'files' and 'dirs' keys, both with list values and with at least one a non-empty list
                error_msg = "sanity_check_paths for %s does not meet requirements: %s" % (ec['spec'], ec_scp)
                self.assertEqual(sorted(ec_scp.keys()), ['dirs', 'files'], error_msg)
                self.assertTrue(isinstance(ec_scp['dirs'], list), error_msg)
                self.assertTrue(isinstance(ec_scp['files'], list), error_msg)
                self.assertTrue(ec_scp['dirs'] or ec_scp['files'], error_msg)

    def test_easyconfig_locations(self):
        """Make sure all easyconfigs files are in the right location."""
        easyconfig_dirs_regex = re.compile(r'/easybuild/easyconfigs/[0a-z]/[^/]+$')
        topdir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        for (dirpath, _, filenames) in os.walk(topdir):
            # ignore git/svn dirs & archived easyconfigs
            if '/.git/' in dirpath or '/.svn/' in dirpath or '__archive__' in dirpath:
                continue
            # check whether list of .eb files is non-empty
            easyconfig_files = [fn for fn in filenames if fn.endswith('eb')]
            if easyconfig_files:
                # check whether path matches required pattern
                if not easyconfig_dirs_regex.search(dirpath):
                    # only exception: TEMPLATE.eb
                    if not (dirpath.endswith('/easybuild/easyconfigs') and filenames == ['TEMPLATE.eb']):
                        self.assertTrue(False, "List of easyconfig files in %s is empty: %s" % (dirpath, filenames))

    def check_sha256_checksums(self, changed_ecs):
        """Make sure changed easyconfigs have SHA256 checksums in place."""
        # list of software for which checksums can not be required,
        # e.g. because 'source' files need to be constructed manually
        whitelist = ['Kent_tools-*', 'MATLAB-*', 'OCaml-*']

        # the check_sha256_checksums function (again) creates an EasyBlock instance
        # for easyconfigs using the Bundle easyblock, this is a problem because the 'sources' easyconfig parameter
        # is updated in place (sources for components are added the 'parent' sources) in Bundle's __init__;
        # therefore, we need to reset 'sources' to an empty list here if Bundle is used...
        for ec in changed_ecs:
            if ec['easyblock'] == 'Bundle':
                ec['sources'] = []

        # filter out deprecated easyconfigs
        retained_changed_ecs = []
        for ec in changed_ecs:
            if not ec['deprecated']:
                retained_changed_ecs.append(ec)

        checksum_issues = check_sha256_checksums(retained_changed_ecs, whitelist=whitelist)
        self.assertTrue(len(checksum_issues) == 0, "No checksum issues:\n%s" % '\n'.join(checksum_issues))

    def check_python_packages(self, changed_ecs):
        """Several checks for easyconfigs that install (bundles of) Python packages."""

        # MATLAB-Engine, PyTorch do not support installation with 'pip'
        whitelist_pip = ['MATLAB-Engine-*', 'PyTorch-*']

        # collect all failures, so one assertion at the end reports everything at once
        failing_checks = []

        for ec in changed_ecs:

            ec_fn = os.path.basename(ec.path)
            easyblock = ec.get('easyblock')
            exts_defaultclass = ec.get('exts_defaultclass')
            download_dep_fail = ec.get('download_dep_fail')
            exts_download_dep_fail = ec.get('exts_download_dep_fail')
            use_pip = ec.get('use_pip')

            # download_dep_fail should be set when using PythonPackage
            if easyblock == 'PythonPackage':
                if not download_dep_fail:
                    failing_checks.append("'download_dep_fail' set in %s" % ec_fn)

            # use_pip should be set when using PythonPackage or PythonBundle (except for whitelisted easyconfigs)
            if easyblock in ['PythonBundle', 'PythonPackage']:
                if not use_pip and not any(re.match(regex, ec_fn) for regex in whitelist_pip):
                    failing_checks.append("'use_pip' set in %s" % ec_fn)

            # download_dep_fail is enabled automatically in PythonBundle easyblock, so shouldn't be set
            if easyblock == 'PythonBundle':
                if download_dep_fail or exts_download_dep_fail:
                    fail = "'*download_dep_fail' set in %s (shouldn't, since PythonBundle easyblock is used)" % ec_fn
                    failing_checks.append(fail)

            elif exts_defaultclass == 'PythonPackage':
                # bundle of Python packages should use PythonBundle
                if easyblock == 'Bundle':
                    fail = "'PythonBundle' easyblock is used for bundle of Python packages in %s" % ec_fn
                    failing_checks.append(fail)
                else:
                    # both download_dep_fail and use_pip should be set via exts_default_options
                    # when installing Python packages as extensions
                    exts_default_options = ec.get('exts_default_options', {})
                    for key in ['download_dep_fail', 'use_pip']:
                        if not exts_default_options.get(key):
                            failing_checks.append("'%s' set in exts_default_options in %s" % (key, ec_fn))

            # if Python is a dependency, that should be reflected in the versionsuffix
            if any(dep['name'] == 'Python' for dep in ec['dependencies']):
                if not re.search(r'-Python-[23]\.[0-9]+\.[0-9]+', ec['versionsuffix']):
                    failing_checks.append("'-Python-%%(pyver)s' included in versionsuffix in %s" % ec_fn)

        self.assertFalse(failing_checks, '\n'.join(failing_checks))

    def test_changed_files_pull_request(self):
        """Specific checks only done for the (easyconfig) files that were changed in a pull request."""

        # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
        if re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST', '(none)')):

            # target branch should be anything other than 'master';
            # usually is 'develop', but could also be a release branch like '3.7.x'
            travis_branch = os.environ.get('TRAVIS_BRANCH', None)
            if travis_branch and travis_branch != 'master':

                if not self.parsed_easyconfigs:
                    self.process_all_easyconfigs()

                # relocate to top-level directory of repository to run 'git diff' command
                top_dir = os.path.dirname(os.path.dirname(get_paths_for('easyconfigs')[0]))
                cwd = change_dir(top_dir)

                # get list of changed easyconfigs
                cmd = "git diff --name-only --diff-filter=AM %s...HEAD" % travis_branch
                out, ec = run_cmd(cmd, simple=False)
                changed_ecs_filenames = [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]
                print("\nList of changed easyconfig files in this PR: %s" % '\n'.join(changed_ecs_filenames))

                change_dir(cwd)

                # grab parsed easyconfigs for changed easyconfig files
                changed_ecs = []
                for ec_fn in changed_ecs_filenames:
                    match = None
                    for ec in self.parsed_easyconfigs:
                        if os.path.basename(ec['spec']) == ec_fn:
                            match = ec['ec']
                            break

                    if match:
                        changed_ecs.append(match)
                    else:
                        # if no easyconfig is found, it's possible some archived easyconfigs were touched in the PR...
                        # so as a last resort, try to find the easyconfig file in __archive__
                        easyconfigs_path = get_paths_for("easyconfigs")[0]
                        specs = glob.glob('%s/__archive__/*/*/%s' % (easyconfigs_path, ec_fn))
                        if len(specs) == 1:
                            ec = process_easyconfig(specs[0])[0]
                            changed_ecs.append(ec['ec'])
                        else:
                            error_msg = "Failed to find parsed easyconfig for %s" % ec_fn
                            error_msg += " (and could not isolate it in easyconfigs archive either)"
                            self.assertTrue(False, error_msg)

                # run checks on changed easyconfigs
                self.check_sha256_checksums(changed_ecs)
                self.check_python_packages(changed_ecs)

    def test_zzz_cleanup(self):
        """Dummy test to clean up global temporary directory."""
        # named 'zzz' so unittest's alphabetical ordering runs it last
        shutil.rmtree(self.TMPDIR)
def main(args=None, logfile=None, do_build=None, testing=False, modtool=None):
    """
    Main function: parse command line options, and act accordingly.
    :param args: command line arguments to use
    :param logfile: log file to use
    :param do_build: whether or not to actually perform the build
    :param testing: enable testing mode
    :param modtool: ModulesTool instance to use (one is created if None)

    NOTE(review): this block was flattened onto a few physical lines; it has been
    re-indented without changing any code tokens.
    """
    # purposely session state very early, to avoid modules loaded by EasyBuild meddling in
    init_session_state = session_state()

    eb_go, cfg_settings = set_up_configuration(args=args, logfile=logfile, testing=testing)
    options, orig_paths = eb_go.options, eb_go.args

    # unpack the configuration settings tuple produced by set_up_configuration
    global _log
    (build_specs, _log, logfile, robot_path, search_query, eb_tmpdir, try_to_generate,
     tweaked_ecs_paths) = cfg_settings

    # load hook implementations (if any)
    hooks = load_hooks(options.hooks)

    run_hook(START, hooks)

    if modtool is None:
        modtool = modules_tool(testing=testing)

    # check whether any (EasyBuild-generated) modules are loaded already in the current session
    modtool.check_loaded_modules()

    if options.last_log:
        # print location to last log file, and exit
        last_log = find_last_log(logfile) or '(none)'
        print_msg(last_log, log=_log, prefix=False)

    # check whether packaging is supported when it's being used
    if options.package:
        check_pkg_support()
    else:
        _log.debug("Packaging not enabled, so not checking for packaging support.")

    # search for easyconfigs, if a query is specified
    if search_query:
        search_easyconfigs(search_query, short=options.search_short, filename_only=options.search_filename,
                           terse=options.terse)

    # GitHub options that warrant a silent cleanup & exit
    if options.check_github:
        check_github()
    elif options.install_github_token:
        install_github_token(options.github_user, silent=build_option('silent'))
    elif options.close_pr:
        close_pr(options.close_pr, motivation_msg=options.close_pr_msg)
    elif options.list_prs:
        print(list_prs(options.list_prs))
    elif options.merge_pr:
        merge_pr(options.merge_pr)
    elif options.review_pr:
        print(review_pr(pr=options.review_pr, colored=use_color(options.color)))
    elif options.list_installed_software:
        detailed = options.list_installed_software == 'detailed'
        print(list_software(output_format=options.output_format, detailed=detailed, only_installed=True))
    elif options.list_software:
        print(list_software(output_format=options.output_format, detailed=options.list_software == 'detailed'))

    # non-verbose cleanup after handling GitHub integration stuff or printing terse info
    early_stop_options = [
        options.check_github,
        options.install_github_token,
        options.list_installed_software,
        options.list_software,
        options.close_pr,
        options.list_prs,
        options.merge_pr,
        options.review_pr,
        options.terse,
        search_query,
    ]
    if any(early_stop_options):
        clean_exit(logfile, eb_tmpdir, testing, silent=True)

    # update session state
    eb_config = eb_go.generate_cmd_line(add_default=True)
    modlist = modtool.list()  # build options must be initialized first before 'module list' works
    init_session_state.update({'easybuild_configuration': eb_config})
    init_session_state.update({'module_list': modlist})
    _log.debug("Initial session state: %s" % init_session_state)

    # determine easybuild-easyconfigs package install path
    easyconfigs_pkg_paths = get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR)
    if not easyconfigs_pkg_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    if options.install_latest_eb_release:
        if orig_paths:
            raise EasyBuildError("Installing the latest EasyBuild release can not be combined with installing "
                                 "other easyconfigs")
        else:
            eb_file = find_easybuild_easyconfig()
            orig_paths.append(eb_file)

    categorized_paths = categorize_files_by_type(orig_paths)

    # command line options that do not require any easyconfigs to be specified
    new_update_preview_pr = options.new_pr or options.update_pr or options.preview_pr
    no_ec_opts = [options.aggregate_regtest, options.regtest, search_query, new_update_preview_pr]

    # determine paths to easyconfigs
    determined_paths = det_easyconfig_paths(categorized_paths['easyconfigs'])
    if determined_paths:
        # transform paths into tuples, use 'False' to indicate the corresponding easyconfig files were not generated
        paths = [(p, False) for p in determined_paths]
    else:
        if 'name' in build_specs:
            # try to obtain or generate an easyconfig file via build specifications if a software name is provided
            paths = find_easyconfigs_by_specs(build_specs, robot_path, try_to_generate, testing=testing)
        elif any(no_ec_opts):
            paths = determined_paths
        else:
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        # fallback: easybuild-easyconfigs install path
        regtest_ok = regtest([path[0] for path in paths] or easyconfigs_pkg_paths, modtool)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs, generated_ecs = parse_easyconfigs(paths, validate=not options.inject_checksums)

    # handle --check-contrib & --check-style options
    if run_contrib_style_checks([ec['ec'] for ec in easyconfigs], options.check_contrib, options.check_style):
        clean_exit(logfile, eb_tmpdir, testing)

    # verify easyconfig filenames, if desired
    if options.verify_easyconfig_filenames:
        _log.info("Verifying easyconfig filenames...")
        for easyconfig in easyconfigs:
            verify_easyconfig_filename(easyconfig['spec'], easyconfig['ec'], parsed_ec=easyconfig['ec'])

    # tweak obtained easyconfig files, if requested
    # don't try and tweak anything if easyconfigs were generated, since building a full dep graph will fail
    # if easyconfig files for the dependencies are not available
    if try_to_generate and build_specs and not generated_ecs:
        easyconfigs = tweak(easyconfigs, build_specs, modtool, targetdirs=tweaked_ecs_paths)

    if options.containerize:
        # if --containerize/-C create a container recipe (and optionally container image), and stop
        containerize(easyconfigs)
        clean_exit(logfile, eb_tmpdir, testing)

    forced = options.force or options.rebuild
    dry_run_mode = options.dry_run or options.dry_run_short

    # skip modules that are already installed unless forced, or unless an option is used that warrants not skipping
    if not (forced or dry_run_mode or options.extended_dry_run or new_update_preview_pr or options.inject_checksums):
        retained_ecs = skip_available(easyconfigs, modtool)
        if not testing:
            for skipped_ec in [ec for ec in easyconfigs if ec not in retained_ecs]:
                print_msg("%s is already installed (module found), skipping" % skipped_ec['full_mod_name'])
        easyconfigs = retained_ecs

    # determine an order that will allow all specs in the set to build
    if len(easyconfigs) > 0:
        # resolve dependencies if robot is enabled, except in dry run mode
        # one exception: deps *are* resolved with --new-pr or --update-pr when dry run mode is enabled
        if options.robot and (not dry_run_mode or new_update_preview_pr):
            print_msg("resolving dependencies ...", log=_log, silent=testing)
            ordered_ecs = resolve_dependencies(easyconfigs, modtool)
        else:
            ordered_ecs = easyconfigs
    elif new_update_preview_pr:
        ordered_ecs = None
    else:
        print_msg("No easyconfigs left to be built.", log=_log, silent=testing)
        ordered_ecs = []

    # creating/updating PRs
    if new_update_preview_pr:
        if options.new_pr:
            new_pr(categorized_paths, ordered_ecs, title=options.pr_title, descr=options.pr_descr,
                   commit_msg=options.pr_commit_msg)
        elif options.preview_pr:
            print(review_pr(paths=determined_paths, colored=use_color(options.color)))
        else:
            update_pr(options.update_pr, categorized_paths, ordered_ecs, commit_msg=options.pr_commit_msg)

    # dry_run: print all easyconfigs and dependencies, and whether they are already built
    elif dry_run_mode:
        txt = dry_run(easyconfigs, modtool, short=not options.dry_run)
        print_msg(txt, log=_log, silent=testing, prefix=False)

    elif options.check_conflicts:
        if check_conflicts(easyconfigs, modtool):
            print_error("One or more conflicts detected!")
            sys.exit(1)
        else:
            print_msg("\nNo conflicts detected!\n", prefix=False)

    # dump source script to set up build environment
    elif options.dump_env_script:
        dump_env_script(easyconfigs)

    elif options.inject_checksums:
        inject_checksums(ordered_ecs, options.inject_checksums)

    # cleanup and exit after dry run, searching easyconfigs or submitting regression test
    stop_options = [options.check_conflicts, dry_run_mode, options.dump_env_script, options.inject_checksums]
    if any(no_ec_opts) or any(stop_options):
        clean_exit(logfile, eb_tmpdir, testing)

    # create dependency graph and exit
    if options.dep_graph:
        _log.info("Creating dependency graph %s" % options.dep_graph)
        dep_graph(options.dep_graph, ordered_ecs)
        clean_exit(logfile, eb_tmpdir, testing, silent=True)

    # submit build as job(s), clean up and exit
    if options.job:
        submit_jobs(ordered_ecs, eb_go.generate_cmd_line(), testing=testing)
        if not testing:
            print_msg("Submitted parallel build jobs, exiting now")
            clean_exit(logfile, eb_tmpdir, testing)

    # build software, will exit when errors occurs (except when testing)
    if not testing or (testing and do_build):
        exit_on_failure = not (options.dump_test_report or options.upload_test_report)
        ecs_with_res = build_and_install_software(ordered_ecs, init_session_state, exit_on_failure=exit_on_failure)
    else:
        ecs_with_res = [(ec, {}) for ec in ordered_ecs]

    correct_builds_cnt = len([ec_res for (_, ec_res) in ecs_with_res if ec_res.get('success', False)])
    overall_success = correct_builds_cnt == len(ordered_ecs)
    success_msg = "Build succeeded for %s out of %s" % (correct_builds_cnt, len(ordered_ecs))

    repo = init_repository(get_repository(), get_repositorypath())
    repo.cleanup()

    # dump/upload overall test report
    test_report_msg = overall_test_report(ecs_with_res, len(paths), overall_success, success_msg, init_session_state)
    if test_report_msg is not None:
        print_msg(test_report_msg)

    print_msg(success_msg, log=_log, silent=testing)

    # cleanup and spec files
    for ec in easyconfigs:
        if 'original_spec' in ec and os.path.isfile(ec['spec']):
            os.remove(ec['spec'])

    run_hook(END, hooks)

    # stop logging and cleanup tmp log file, unless one build failed (individual logs are located in eb_tmpdir)
    stop_logging(logfile, logtostdout=options.logtostdout)
    if overall_success:
        cleanup(logfile, eb_tmpdir, testing)
def main(args=None, logfile=None, do_build=None, testing=False, modtool=None):
    """
    Main function: parse command line options, and act accordingly.
    :param args: command line arguments to use
    :param logfile: log file to use
    :param do_build: whether or not to actually perform the build
    :param testing: enable testing mode
    :param modtool: ModulesTool instance to use (one is created if None)

    NOTE(review): this block was flattened onto a few physical lines; it has been
    re-indented without changing any code tokens. It appears to be an *older*
    duplicate of main() (cf. the variant above) — confirm which one is live.
    NOTE(review): the bare 'print review_pr(...)' / 'print list_software(...)'
    statements below are Python-2-only syntax (syntax errors on Python 3);
    left untouched here since a doc-only update must not change code tokens.
    """
    # purposely session state very early, to avoid modules loaded by EasyBuild meddling in
    init_session_state = session_state()

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # set umask (as early as possible)
    if options.umask is not None:
        new_umask = int(options.umask, 8)
        old_umask = os.umask(new_umask)

    # set by option parsers via set_tmpdir
    eb_tmpdir = tempfile.gettempdir()

    search_query = options.search or options.search_filename or options.search_short

    # initialise logging for main
    global _log
    _log, logfile = init_logging(logfile, logtostdout=options.logtostdout,
                                 silent=(testing or options.terse or search_query), colorize=options.color)

    # disallow running EasyBuild as root (by default)
    check_root_usage(allow_use_as_root=options.allow_use_as_root_and_accept_consequences)

    # log startup info
    eb_cmd_line = eb_go.generate_cmd_line() + eb_go.args
    log_start(eb_cmd_line, eb_tmpdir)

    if options.umask is not None:
        _log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    # determine robot path
    # --try-X, --dep-graph, --search use robot path for searching, so enable it with path of installed easyconfigs
    tweaked_ecs = try_to_generate and build_specs
    tweaked_ecs_paths, pr_path = alt_easyconfig_paths(eb_tmpdir, tweaked_ecs=tweaked_ecs, from_pr=options.from_pr)
    auto_robot = try_to_generate or options.check_conflicts or options.dep_graph or search_query
    robot_path = det_robot_path(options.robot_paths, tweaked_ecs_paths, pr_path, auto_robot=auto_robot)
    _log.debug("Full robot path: %s" % robot_path)

    # configure & initialize build options
    config_options_dict = eb_go.get_options_by_section('config')
    build_options = {
        'build_specs': build_specs,
        'command_line': eb_cmd_line,
        'external_modules_metadata': parse_external_modules_metadata(options.external_modules_metadata),
        'pr_path': pr_path,
        'robot_path': robot_path,
        'silent': testing,
        'try_to_generate': try_to_generate,
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }

    # initialise the EasyBuild configuration & build options
    config.init(options, config_options_dict)
    config.init_build_options(build_options=build_options, cmdline_options=options)

    if modtool is None:
        modtool = modules_tool(testing=testing)

    # check whether any (EasyBuild-generated) modules are loaded already in the current session
    modtool.check_loaded_modules()

    if options.last_log:
        # print location to last log file, and exit
        last_log = find_last_log(logfile) or '(none)'
        print_msg(last_log, log=_log, prefix=False)

    # check whether packaging is supported when it's being used
    if options.package:
        check_pkg_support()
    else:
        _log.debug("Packaging not enabled, so not checking for packaging support.")

    # search for easyconfigs, if a query is specified
    if search_query:
        search_easyconfigs(search_query, short=options.search_short, filename_only=options.search_filename,
                           terse=options.terse)

    # GitHub options that warrant a silent cleanup & exit
    if options.check_github:
        check_github()
    elif options.install_github_token:
        install_github_token(options.github_user, silent=build_option('silent'))
    elif options.merge_pr:
        merge_pr(options.merge_pr)
    elif options.review_pr:
        print review_pr(pr=options.review_pr, colored=use_color(options.color))
    elif options.list_installed_software:
        detailed = options.list_installed_software == 'detailed'
        print list_software(output_format=options.output_format, detailed=detailed, only_installed=True)
    elif options.list_software:
        print list_software(output_format=options.output_format, detailed=options.list_software == 'detailed')

    # non-verbose cleanup after handling GitHub integration stuff or printing terse info
    early_stop_options = [
        options.check_github,
        options.install_github_token,
        options.list_installed_software,
        options.list_software,
        options.merge_pr,
        options.review_pr,
        options.terse,
        search_query,
    ]
    if any(early_stop_options):
        cleanup(logfile, eb_tmpdir, testing, silent=True)
        sys.exit(0)

    # update session state
    eb_config = eb_go.generate_cmd_line(add_default=True)
    modlist = modtool.list()  # build options must be initialized first before 'module list' works
    init_session_state.update({'easybuild_configuration': eb_config})
    init_session_state.update({'module_list': modlist})
    _log.debug("Initial session state: %s" % init_session_state)

    # determine easybuild-easyconfigs package install path
    easyconfigs_pkg_paths = get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR)
    if not easyconfigs_pkg_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    if options.install_latest_eb_release:
        if orig_paths:
            raise EasyBuildError("Installing the latest EasyBuild release can not be combined with installing "
                                 "other easyconfigs")
        else:
            eb_file = find_easybuild_easyconfig()
            orig_paths.append(eb_file)

    categorized_paths = categorize_files_by_type(orig_paths)

    # command line options that do not require any easyconfigs to be specified
    new_update_preview_pr = options.new_pr or options.update_pr or options.preview_pr
    no_ec_opts = [options.aggregate_regtest, options.regtest, search_query, new_update_preview_pr]

    # determine paths to easyconfigs
    determined_paths = det_easyconfig_paths(categorized_paths['easyconfigs'])
    if determined_paths:
        # transform paths into tuples, use 'False' to indicate the corresponding easyconfig files were not generated
        paths = [(p, False) for p in determined_paths]
    else:
        if 'name' in build_specs:
            # try to obtain or generate an easyconfig file via build specifications if a software name is provided
            paths = find_easyconfigs_by_specs(build_specs, robot_path, try_to_generate, testing=testing)
        elif any(no_ec_opts):
            paths = determined_paths
        else:
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        # fallback: easybuild-easyconfigs install path
        regtest_ok = regtest([path[0] for path in paths] or easyconfigs_pkg_paths, modtool)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    if options.check_style:
        _log.debug("Running style check...")
        if cmdline_easyconfigs_style_check([path[0] for path in paths]):
            print_msg("All style checks passed!", prefix=False)
            cleanup(logfile, eb_tmpdir, testing)
            sys.exit(0)
        else:
            raise EasyBuildError("One or more style checks FAILED!")

    # read easyconfig files
    easyconfigs, generated_ecs = parse_easyconfigs(paths, validate=not options.inject_checksums)

    # verify easyconfig filenames, if desired
    if options.verify_easyconfig_filenames:
        _log.info("Verifying easyconfig filenames...")
        for easyconfig in easyconfigs:
            verify_easyconfig_filename(easyconfig['spec'], easyconfig['ec'], parsed_ec=easyconfig['ec'])

    # tweak obtained easyconfig files, if requested
    # don't try and tweak anything if easyconfigs were generated, since building a full dep graph will fail
    # if easyconfig files for the dependencies are not available
    if try_to_generate and build_specs and not generated_ecs:
        easyconfigs = tweak(easyconfigs, build_specs, modtool, targetdirs=tweaked_ecs_paths)

    if options.containerize:
        containerize(easyconfigs)
        cleanup(logfile, eb_tmpdir, testing)
        sys.exit(0)

    forced = options.force or options.rebuild
    dry_run_mode = options.dry_run or options.dry_run_short

    # skip modules that are already installed unless forced, or unless an option is used that warrants not skipping
    if not (forced or dry_run_mode or options.extended_dry_run or new_update_preview_pr or options.inject_checksums):
        retained_ecs = skip_available(easyconfigs, modtool)
        if not testing:
            for skipped_ec in [ec for ec in easyconfigs if ec not in retained_ecs]:
                print_msg("%s is already installed (module found), skipping" % skipped_ec['full_mod_name'])
        easyconfigs = retained_ecs

    # determine an order that will allow all specs in the set to build
    if len(easyconfigs) > 0:
        # resolve dependencies if robot is enabled, except in dry run mode
        # one exception: deps *are* resolved with --new-pr or --update-pr when dry run mode is enabled
        if options.robot and (not dry_run_mode or new_update_preview_pr):
            print_msg("resolving dependencies ...", log=_log, silent=testing)
            ordered_ecs = resolve_dependencies(easyconfigs, modtool)
        else:
            ordered_ecs = easyconfigs
    elif new_update_preview_pr:
        ordered_ecs = None
    else:
        print_msg("No easyconfigs left to be built.", log=_log, silent=testing)
        ordered_ecs = []

    # creating/updating PRs
    if new_update_preview_pr:
        if options.new_pr:
            new_pr(categorized_paths, ordered_ecs, title=options.pr_title, descr=options.pr_descr,
                   commit_msg=options.pr_commit_msg)
        elif options.preview_pr:
            print review_pr(paths=determined_paths, colored=use_color(options.color))
        else:
            update_pr(options.update_pr, categorized_paths, ordered_ecs, commit_msg=options.pr_commit_msg)

    # dry_run: print all easyconfigs and dependencies, and whether they are already built
    elif dry_run_mode:
        txt = dry_run(easyconfigs, modtool, short=not options.dry_run)
        print_msg(txt, log=_log, silent=testing, prefix=False)

    elif options.check_conflicts:
        if check_conflicts(easyconfigs, modtool):
            print_error("One or more conflicts detected!")
            sys.exit(1)
        else:
            print_msg("\nNo conflicts detected!\n", prefix=False)

    # dump source script to set up build environment
    elif options.dump_env_script:
        dump_env_script(easyconfigs)

    elif options.inject_checksums:
        inject_checksums(ordered_ecs, options.inject_checksums)

    # cleanup and exit after dry run, searching easyconfigs or submitting regression test
    stop_options = [options.check_conflicts, dry_run_mode, options.dump_env_script, options.inject_checksums]
    if any(no_ec_opts) or any(stop_options):
        cleanup(logfile, eb_tmpdir, testing)
        sys.exit(0)

    # create dependency graph and exit
    if options.dep_graph:
        _log.info("Creating dependency graph %s" % options.dep_graph)
        dep_graph(options.dep_graph, ordered_ecs)
        cleanup(logfile, eb_tmpdir, testing, silent=True)
        sys.exit(0)

    # submit build as job(s), clean up and exit
    if options.job:
        submit_jobs(ordered_ecs, eb_go.generate_cmd_line(), testing=testing)
        if not testing:
            print_msg("Submitted parallel build jobs, exiting now")
            cleanup(logfile, eb_tmpdir, testing)
            sys.exit(0)

    # build software, will exit when errors occurs (except when testing)
    if not testing or (testing and do_build):
        exit_on_failure = not (options.dump_test_report or options.upload_test_report)
        hooks = load_hooks(options.hooks)
        ecs_with_res = build_and_install_software(ordered_ecs, init_session_state,
                                                  exit_on_failure=exit_on_failure, hooks=hooks)
    else:
        ecs_with_res = [(ec, {}) for ec in ordered_ecs]

    correct_builds_cnt = len([ec_res for (_, ec_res) in ecs_with_res if ec_res.get('success', False)])
    overall_success = correct_builds_cnt == len(ordered_ecs)
    success_msg = "Build succeeded for %s out of %s" % (correct_builds_cnt, len(ordered_ecs))

    repo = init_repository(get_repository(), get_repositorypath())
    repo.cleanup()

    # dump/upload overall test report
    test_report_msg = overall_test_report(ecs_with_res, len(paths), overall_success, success_msg, init_session_state)
    if test_report_msg is not None:
        print_msg(test_report_msg)

    print_msg(success_msg, log=_log, silent=testing)

    # cleanup and spec files
    for ec in easyconfigs:
        if 'original_spec' in ec and os.path.isfile(ec['spec']):
            os.remove(ec['spec'])

    # stop logging and cleanup tmp log file, unless one build failed (individual logs are located in eb_tmpdir)
    stop_logging(logfile, logtostdout=options.logtostdout)
    if overall_success:
        cleanup(logfile, eb_tmpdir, testing)
def main(testing_data=(None, None)):
    """
    Main function:
    @arg options: a tuple: (options, paths, logger, logfile, hn) as defined in parse_options
    This function will:
    - read easyconfig
    - build software

    NOTE(review): this is the *oldest* duplicate of main() in this file, using
    Python-2-only syntax ('has_key', 'except IOError, err') — it cannot run on
    Python 3 as-is. The function body also appears TRUNCATED at the end of this
    chunk (it stops right after reading easyconfigs, before any build logic).
    Re-indented without changing any code tokens.
    """
    # disallow running EasyBuild as root
    if os.getuid() == 0:
        sys.stderr.write(
            "ERROR: You seem to be running EasyBuild with root privileges.\n"
            "That's not wise, so let's end this here.\n"
            "Exiting.\n"
        )
        sys.exit(1)

    # steer behavior when testing main
    testing = testing_data[0] is not None
    args, logfile = testing_data

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # initialise logging for main
    if options.logtostdout:
        fancylogger.logToScreen(enable=True, stdout=True)
    else:
        if logfile is None:
            # mkstemp returns (fd,filename), fd is from os.open, not regular open!
            fd, logfile = tempfile.mkstemp(suffix=".log", prefix="easybuild-")
            os.close(fd)
        fancylogger.logToFile(logfile)
        print_msg("temporary log file in case of crash %s" % (logfile), log=None, silent=testing)

    global _log
    _log = fancylogger.getLogger(fname=False)

    # hello world!
    _log.info(this_is_easybuild())

    # set strictness of filetools module
    if options.strict:
        filetools.strictness = options.strict

    if not options.robot is None:
        if options.robot:
            _log.info("Using robot path: %s" % options.robot)
        else:
            _log.error("No robot path specified, and unable to determine easybuild-easyconfigs install path.")

    # determine easybuild-easyconfigs package install path
    easyconfigs_paths = get_paths_for("easyconfigs", robot_path=options.robot)
    easyconfigs_pkg_full_path = None

    search_path = os.getcwd()
    if easyconfigs_paths:
        easyconfigs_pkg_full_path = easyconfigs_paths[0]
        if not options.robot:
            search_path = easyconfigs_pkg_full_path
        else:
            search_path = options.robot
    else:
        _log.info("Failed to determine install path for easybuild-easyconfigs package.")

    if options.robot:
        easyconfigs_paths = [options.robot] + easyconfigs_paths

    # initialise the easybuild configuration
    config.init(options, eb_go.get_options_by_section("config"))

    # search for easyconfigs
    if options.search:
        search_file(search_path, options.search, silent=testing)

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, software_build_specs) = process_software_build_specs(options)

    paths = []
    if len(orig_paths) == 0:
        if software_build_specs.has_key("name"):
            paths = [
                obtain_path(
                    software_build_specs, easyconfigs_paths,
                    try_to_generate=try_to_generate, exit_on_error=not testing
                )
            ]
        elif not any([options.aggregate_regtest, options.search, options.regtest]):
            print_error(
                ("Please provide one or multiple easyconfig files, or use software build "
                 "options to make EasyBuild search for easyconfigs"),
                log=_log, opt_parser=eb_go.parser, exit_on_error=not testing,
            )
    else:
        # look for easyconfigs with relative paths in easybuild-easyconfigs package,
        # unless they we found at the given relative paths
        if easyconfigs_pkg_full_path:
            # create a mapping from filename to path in easybuild-easyconfigs package install path
            easyconfigs_map = {}
            for (subpath, _, filenames) in os.walk(easyconfigs_pkg_full_path):
                for filename in filenames:
                    easyconfigs_map.update({filename: os.path.join(subpath, filename)})

            # try and find non-existing non-absolute eaysconfig paths in easybuild-easyconfigs package install path
            for idx, orig_path in enumerate(orig_paths):
                if not os.path.isabs(orig_path) and not os.path.exists(orig_path):
                    if orig_path in easyconfigs_map:
                        _log.info("Found %s in %s: %s" % (orig_path, easyconfigs_pkg_full_path,
                                                          easyconfigs_map[orig_path]))
                        orig_paths[idx] = easyconfigs_map[orig_path]

        # indicate that specified paths do not contain generated easyconfig files
        paths = [(path, False) for path in orig_paths]

    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        if paths:
            regtest_ok = regtest(options, [path[0] for path in paths])
        else:
            # fallback: easybuild-easyconfigs install path
            regtest_ok = regtest(options, [easyconfigs_pkg_full_path])
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    if any([options.search, options.regtest]):
        cleanup_logfile_and_exit(logfile, testing, True)

    # building a dependency graph implies force, so that all dependencies are retained
    # and also skips validation of easyconfigs (e.g. checking os dependencies)
    validate_easyconfigs = True
    retain_all_deps = False
    if options.dep_graph:
        _log.info("Enabling force to generate dependency graph.")
        options.force = True
        validate_easyconfigs = False
        retain_all_deps = True

    # read easyconfig files
    easyconfigs = []
    for (path, generated) in paths:
        path = os.path.abspath(path)
        if not (os.path.exists(path)):
            print_error("Can't find path %s" % path)
        try:
            files = find_easyconfigs(path)
            for f in files:
                if not generated and try_to_generate and software_build_specs:
                    ec_file = easyconfig.tools.tweak(f, None, software_build_specs)
                else:
                    ec_file = f
                easyconfigs.extend(process_easyconfig(ec_file, options.only_blocks, validate=validate_easyconfigs))
        except IOError, err:
            _log.error("Processing easyconfigs in path %s failed: %s" % (path, err))
def main(args=None, logfile=None, do_build=None, testing=False):
    """
    Main function: parse command line options, and act accordingly.
    @param args: command line arguments to use
    @param logfile: log file to use
    @param do_build: whether or not to actually perform the build
    @param testing: enable testing mode

    Fix vs. original: the GitHub 'review PR' branch used a bare Python-2
    ``print review_pr(...)`` statement, which is a syntax error on Python 3.
    All such prints are now parenthesized single-argument calls, which produce
    byte-identical output on Python 2 as well. (Block also re-indented from the
    flattened original; no other logic changed.)
    """
    # purposely session state very early, to avoid modules loaded by EasyBuild meddling in
    init_session_state = session_state()

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # set umask (as early as possible)
    if options.umask is not None:
        new_umask = int(options.umask, 8)
        old_umask = os.umask(new_umask)

    # set by option parsers via set_tmpdir
    eb_tmpdir = tempfile.gettempdir()

    # initialise logging for main
    global _log
    _log, logfile = init_logging(logfile, logtostdout=options.logtostdout, silent=testing or options.last_log)

    # disallow running EasyBuild as root
    if os.getuid() == 0:
        raise EasyBuildError(
            "You seem to be running EasyBuild with root privileges which is not wise, "
            "so let's end this here."
        )

    # log startup info
    eb_cmd_line = eb_go.generate_cmd_line() + eb_go.args
    log_start(eb_cmd_line, eb_tmpdir)

    if options.umask is not None:
        _log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    # determine robot path
    # --try-X, --dep-graph, --search use robot path for searching, so enable it with path of installed easyconfigs
    tweaked_ecs = try_to_generate and build_specs
    tweaked_ecs_path, pr_path = alt_easyconfig_paths(eb_tmpdir, tweaked_ecs=tweaked_ecs, from_pr=options.from_pr)
    auto_robot = try_to_generate or options.dep_graph or options.search or options.search_short
    robot_path = det_robot_path(options.robot_paths, tweaked_ecs_path, pr_path, auto_robot=auto_robot)
    _log.debug("Full robot path: %s" % robot_path)

    # configure & initialize build options
    config_options_dict = eb_go.get_options_by_section("config")
    build_options = {
        "build_specs": build_specs,
        "command_line": eb_cmd_line,
        "pr_path": pr_path,
        "robot_path": robot_path,
        "silent": testing,
        "try_to_generate": try_to_generate,
        "valid_stops": [x[0] for x in EasyBlock.get_steps()],
    }

    # initialise the EasyBuild configuration & build options
    config.init(options, config_options_dict)
    config.init_build_options(build_options=build_options, cmdline_options=options)

    if options.last_log:
        # print location to last log file, and exit
        last_log = find_last_log(logfile) or "(none)"
        print_msg(last_log, log=_log, prefix=False)
        cleanup(logfile, eb_tmpdir, testing, silent=True)
        sys.exit(0)

    # check whether packaging is supported when it's being used
    if options.package:
        check_pkg_support()
    else:
        _log.debug("Packaging not enabled, so not checking for packaging support.")

    # update session state
    eb_config = eb_go.generate_cmd_line(add_default=True)
    modlist = session_module_list(testing=testing)  # build options must be initialized first before 'module list' works
    init_session_state.update({"easybuild_configuration": eb_config})
    init_session_state.update({"module_list": modlist})
    _log.debug("Initial session state: %s" % init_session_state)

    # GitHub integration
    if options.review_pr or options.new_pr or options.update_pr:
        if options.review_pr:
            # parenthesized print: valid on Python 3, identical output on Python 2
            print(review_pr(options.review_pr, colored=options.color))
        elif options.new_pr:
            new_pr(orig_paths, title=options.pr_title, descr=options.pr_descr, commit_msg=options.pr_commit_msg)
        elif options.update_pr:
            update_pr(options.update_pr, orig_paths, commit_msg=options.pr_commit_msg)
        cleanup(logfile, eb_tmpdir, testing)
        sys.exit(0)

    # search for easyconfigs, if a query is specified
    query = options.search or options.search_short
    if query:
        search_easyconfigs(query, short=not options.search)

    # determine easybuild-easyconfigs package install path
    easyconfigs_pkg_paths = get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR)
    if not easyconfigs_pkg_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    # command line options that do not require any easyconfigs to be specified
    no_ec_opts = [
        options.aggregate_regtest,
        options.new_pr,
        options.review_pr,
        options.search,
        options.search_short,
        options.regtest,
        options.update_pr,
    ]

    # determine paths to easyconfigs
    paths = det_easyconfig_paths(orig_paths)
    if paths:
        # transform paths into tuples, use 'False' to indicate the corresponding easyconfig files were not generated
        paths = [(p, False) for p in paths]
    else:
        if "name" in build_specs:
            # try to obtain or generate an easyconfig file via build specifications if a software name is provided
            paths = find_easyconfigs_by_specs(build_specs, robot_path, try_to_generate, testing=testing)
        elif not any(no_ec_opts):
            print_error(
                ("Please provide one or multiple easyconfig files, or use software build "
                 "options to make EasyBuild search for easyconfigs"),
                log=_log, opt_parser=eb_go.parser, exit_on_error=not testing,
            )
    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        # fallback: easybuild-easyconfigs install path
        regtest_ok = regtest([path[0] for path in paths] or easyconfigs_pkg_paths)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs, generated_ecs = parse_easyconfigs(paths)

    # tweak obtained easyconfig files, if requested
    # don't try and tweak anything if easyconfigs were generated, since building a full dep graph will fail
    # if easyconfig files for the dependencies are not available
    if try_to_generate and build_specs and not generated_ecs:
        easyconfigs = tweak(easyconfigs, build_specs, targetdir=tweaked_ecs_path)

    # dry_run: print all easyconfigs and dependencies, and whether they are already built
    if options.dry_run or options.dry_run_short:
        txt = dry_run(easyconfigs, short=not options.dry_run)
        print_msg(txt, log=_log, silent=testing, prefix=False)

    # cleanup and exit after dry run, searching easyconfigs or submitting regression test
    if any(no_ec_opts + [options.dry_run, options.dry_run_short]):
        cleanup(logfile, eb_tmpdir, testing)
        sys.exit(0)

    # skip modules that are already installed unless forced
    if not (options.force or options.rebuild or options.extended_dry_run):
        retained_ecs = skip_available(easyconfigs)
        if not testing:
            for skipped_ec in [ec for ec in easyconfigs if ec not in retained_ecs]:
                print_msg("%s is already installed (module found), skipping" % skipped_ec["full_mod_name"])
        easyconfigs = retained_ecs

    # determine an order that will allow all specs in the set to build
    if len(easyconfigs) > 0:
        if options.robot:
            print_msg("resolving dependencies ...", log=_log, silent=testing)
            ordered_ecs = resolve_dependencies(
                easyconfigs,
                minimal_toolchains=build_option("minimal_toolchains"),
                use_existing_modules=build_option("use_existing_modules"),
            )
        else:
            ordered_ecs = easyconfigs
    else:
        print_msg("No easyconfigs left to be built.", log=_log, silent=testing)
        ordered_ecs = []

    # create dependency graph and exit
    if options.dep_graph:
        _log.info("Creating dependency graph %s" % options.dep_graph)
        dep_graph(options.dep_graph, ordered_ecs)
        sys.exit(0)

    # submit build as job(s), clean up and exit
    if options.job:
        submit_jobs(ordered_ecs, eb_go.generate_cmd_line(), testing=testing)
        if not testing:
            print_msg("Submitted parallel build jobs, exiting now")
            cleanup(logfile, eb_tmpdir, testing)
            sys.exit(0)

    # build software, will exit when errors occurs (except when testing)
    exit_on_failure = not options.dump_test_report and not options.upload_test_report
    if not testing or (testing and do_build):
        ecs_with_res = build_and_install_software(ordered_ecs, init_session_state, exit_on_failure=exit_on_failure)
    else:
        ecs_with_res = [(ec, {}) for ec in ordered_ecs]

    correct_builds_cnt = len([ec_res for (_, ec_res) in ecs_with_res if ec_res.get("success", False)])
    overall_success = correct_builds_cnt == len(ordered_ecs)
    success_msg = "Build succeeded for %s out of %s" % (correct_builds_cnt, len(ordered_ecs))

    repo = init_repository(get_repository(), get_repositorypath())
    repo.cleanup()

    # dump/upload overall test report
    test_report_msg = overall_test_report(ecs_with_res, len(paths), overall_success, success_msg, init_session_state)
    if test_report_msg is not None:
        print_msg(test_report_msg)

    print_msg(success_msg, log=_log, silent=testing)

    # cleanup and spec files
    for ec in easyconfigs:
        if "original_spec" in ec and os.path.isfile(ec["spec"]):
            os.remove(ec["spec"])

    # stop logging and cleanup tmp log file, unless one build failed (individual logs are located in eb_tmpdir)
    stop_logging(logfile, logtostdout=options.logtostdout)
    if overall_success:
        cleanup(logfile, eb_tmpdir, testing)
def test_changed_files_pull_request(self):
    """
    Specific checks only done for the (easyconfig) files that were changed in a pull request.

    Only active when running under a PR build on Travis CI or GitHub Actions (detected
    via environment variables); silently does nothing otherwise.
    """

    def get_eb_files_from_diff(diff_filter):
        # list *.eb files touched in this PR, filtered by git diff-filter status
        # (e.g. 'M' = modified, 'A' = added); closes over target_branch defined below
        cmd = "git diff --name-only --diff-filter=%s %s...HEAD" % (diff_filter, target_branch)
        out, ec = run_cmd(cmd, simple=False)
        return [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]

    # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
    travis_pr_test = re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST', '(none)'))

    # when testing a PR in GitHub Actions, $GITHUB_EVENT_NAME will be set to 'pull_request'
    github_pr_test = os.environ.get('GITHUB_EVENT_NAME') == 'pull_request'

    if travis_pr_test or github_pr_test:

        # target branch should be anything other than 'master';
        # usually is 'develop', but could also be a release branch like '3.7.x'
        if travis_pr_test:
            target_branch = os.environ.get('TRAVIS_BRANCH', None)
        else:
            target_branch = os.environ.get('GITHUB_BASE_REF', None)

        if target_branch is None:
            self.assertTrue(False, "Failed to determine target branch for current pull request.")

        if target_branch != 'master':

            # parsed_easyconfigs is shared class-level state, populated lazily on first use
            if not EasyConfigTest.parsed_easyconfigs:
                self.process_all_easyconfigs()

            # relocate to top-level directory of repository to run 'git diff' command
            top_dir = os.path.dirname(os.path.dirname(get_paths_for('easyconfigs')[0]))
            cwd = change_dir(top_dir)

            # get list of changed easyconfigs
            changed_ecs_filenames = get_eb_files_from_diff(diff_filter='M')
            added_ecs_filenames = get_eb_files_from_diff(diff_filter='A')
            if changed_ecs_filenames:
                print("\nList of changed easyconfig files in this PR: %s" % '\n'.join(changed_ecs_filenames))
            if added_ecs_filenames:
                print("\nList of added easyconfig files in this PR: %s" % '\n'.join(added_ecs_filenames))

            # restore original working directory before running the checks
            change_dir(cwd)

            # grab parsed easyconfigs for changed easyconfig files
            changed_ecs = []
            for ec_fn in changed_ecs_filenames + added_ecs_filenames:
                # match on basename only; assumes easyconfig filenames are unique
                # across the repository — TODO confirm
                match = None
                for ec in EasyConfigTest.parsed_easyconfigs:
                    if os.path.basename(ec['spec']) == ec_fn:
                        match = ec['ec']
                        break

                if match:
                    changed_ecs.append(match)
                else:
                    # if no easyconfig is found, it's possible some archived easyconfigs were touched in the PR...
                    # so as a last resort, try to find the easyconfig file in __archive__
                    easyconfigs_path = get_paths_for("easyconfigs")[0]
                    specs = glob.glob('%s/__archive__/*/*/%s' % (easyconfigs_path, ec_fn))
                    if len(specs) == 1:
                        ec = process_easyconfig(specs[0])[0]
                        changed_ecs.append(ec['ec'])
                    else:
                        error_msg = "Failed to find parsed easyconfig for %s" % ec_fn
                        error_msg += " (and could not isolate it in easyconfigs archive either)"
                        self.assertTrue(False, error_msg)

            # run checks on changed easyconfigs
            self.check_sha256_checksums(changed_ecs)
            self.check_python_packages(changed_ecs, added_ecs_filenames)
            self.check_sanity_check_paths(changed_ecs)
            self.check_https(changed_ecs)
def main(testing_data=(None, None, None)):
    """
    Main function: read easyconfig(s) and build the specified software.

    :param testing_data: tuple (args, logfile, do_build) used to steer behavior when
                         this function is driven from the test suite; all-None when
                         running for real.

    NOTE(review): this block uses Python 2 syntax (``except IOError, err``) and is
    an older revision of ``main()`` than the one appearing earlier in this file.
    """
    # take session state very early on purpose, to avoid modules loaded by EasyBuild
    # itself meddling in the recorded state
    init_session_state = session_state()

    # disallow running EasyBuild as root
    if os.getuid() == 0:
        sys.stderr.write("ERROR: You seem to be running EasyBuild with root privileges.\n"
                         "That's not wise, so let's end this here.\n"
                         "Exiting.\n")
        sys.exit(1)

    # steer behavior when testing main
    testing = testing_data[0] is not None
    args, logfile, do_build = testing_data

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    eb_config = eb_go.generate_cmd_line(add_default=True)
    init_session_state.update({'easybuild_configuration': eb_config})

    # set umask (as early as possible)
    if options.umask is not None:
        new_umask = int(options.umask, 8)
        old_umask = os.umask(new_umask)

    # set temporary directory to use
    eb_tmpdir = set_tmpdir(options.tmpdir)

    # initialise logging for main
    if options.logtostdout:
        fancylogger.logToScreen(enable=True, stdout=True)
    else:
        if logfile is None:
            # mkstemp returns (fd,filename), fd is from os.open, not regular open!
            fd, logfile = tempfile.mkstemp(suffix='.log', prefix='easybuild-')
            os.close(fd)

        fancylogger.logToFile(logfile)
        print_msg('temporary log file in case of crash %s' % (logfile), log=None, silent=testing)

    # module-level logger is deliberately (re)bound here, after logging is configured
    global _log
    _log = fancylogger.getLogger(fname=False)

    if options.umask is not None:
        _log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))

    # hello world!
    _log.info(this_is_easybuild())

    # how was EB called?
    eb_command_line = eb_go.generate_cmd_line() + eb_go.args
    _log.info("Command line: %s" % (" ".join(eb_command_line)))

    _log.info("Using %s as temporary directory" % eb_tmpdir)

    if not options.robot is None:
        if options.robot:
            _log.info("Using robot path(s): %s" % options.robot)
        else:
            # _log.error raises in EasyBuild's fancylogger setup, aborting the run —
            # TODO confirm against framework version in use
            _log.error("No robot paths specified, and unable to determine easybuild-easyconfigs install path.")

    # do not pass options.robot, it's not a list instance (and it shouldn't be modified)
    robot_path = None
    if options.robot:
        robot_path = list(options.robot)

    # determine easybuild-easyconfigs package install path
    easyconfigs_paths = get_paths_for("easyconfigs", robot_path=robot_path)
    # keep track of paths for installed easyconfigs, so we can find specified easyconfigs later on
    easyconfigs_pkg_full_paths = easyconfigs_paths[:]
    if not easyconfigs_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    # specified robot paths are preferred over installed easyconfig files
    # --try-X and --dep-graph both require --robot, so enable it with path of installed easyconfigs
    if robot_path or try_to_generate or options.dep_graph:
        if robot_path is None:
            robot_path = []
        robot_path.extend(easyconfigs_paths)
        easyconfigs_paths = robot_path[:]
        _log.info("Extended list of robot paths with paths for installed easyconfigs: %s" % robot_path)

    # initialise the easybuild configuration
    config.init(options, eb_go.get_options_by_section('config'))

    # building a dependency graph implies force, so that all dependencies are retained
    # and also skips validation of easyconfigs (e.g. checking os dependencies)
    retain_all_deps = False
    if options.dep_graph:
        _log.info("Enabling force to generate dependency graph.")
        options.force = True
        retain_all_deps = True

    config.init_build_options({
        'aggregate_regtest': options.aggregate_regtest,
        'allow_modules_tool_mismatch': options.allow_modules_tool_mismatch,
        'check_osdeps': not options.ignore_osdeps,
        'cleanup_builddir': options.cleanup_builddir,
        'command_line': eb_command_line,
        'debug': options.debug,
        'dry_run': options.dry_run,
        'easyblock': options.easyblock,
        'experimental': options.experimental,
        'force': options.force,
        'github_user': options.github_user,
        'group': options.group,
        'ignore_dirs': options.ignore_dirs,
        'modules_footer': options.modules_footer,
        'only_blocks': options.only_blocks,
        'recursive_mod_unload': options.recursive_module_unload,
        'regtest_output_dir': options.regtest_output_dir,
        'retain_all_deps': retain_all_deps,
        'robot_path': robot_path,
        'sequential': options.sequential,
        'silent': testing,
        'set_gid_bit': options.set_gid_bit,
        'skip': options.skip,
        'skip_test_cases': options.skip_test_cases,
        'sticky_bit': options.sticky_bit,
        'stop': options.stop,
        'umask': options.umask,
        'valid_module_classes': module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        # forcing a build implies skipping easyconfig validation
        'validate': not options.force,
    })

    # obtain list of loaded modules, build options must be initialized first
    modlist = session_module_list()
    init_session_state.update({'module_list': modlist})
    _log.debug("Initial session state: %s" % init_session_state)

    # search for easyconfigs
    if options.search or options.search_short:
        search_path = [os.getcwd()]
        if easyconfigs_paths:
            search_path = easyconfigs_paths
        query = options.search or options.search_short
        ignore_dirs = config.build_option('ignore_dirs')
        silent = config.build_option('silent')
        search_file(search_path, query, short=not options.search, ignore_dirs=ignore_dirs, silent=silent)

    # determine paths to easyconfigs to process: each entry is a tuple
    # (path, generated), where 'generated' marks auto-generated easyconfig files
    paths = []
    if len(orig_paths) == 0:
        if options.from_pr:
            # fetch easyconfigs attached to a GitHub pull request
            pr_path = os.path.join(eb_tmpdir, "files_pr%s" % options.from_pr)
            pr_files = fetch_easyconfigs_from_pr(options.from_pr, path=pr_path, github_user=options.github_user)
            paths = [(path, False) for path in pr_files if path.endswith('.eb')]
        elif 'name' in build_specs:
            # try to obtain or generate an easyconfig file via build specifications
            paths = [obtain_path(build_specs, easyconfigs_paths, try_to_generate=try_to_generate,
                                 exit_on_error=not testing)]
        elif not any([options.aggregate_regtest, options.search, options.search_short, options.regtest]):
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    else:
        # look for easyconfigs with relative paths in easybuild-easyconfigs package,
        # unless they were found at the given relative paths
        if easyconfigs_pkg_full_paths:
            # determine which easyconfigs files need to be found, if any
            ecs_to_find = []
            for idx, orig_path in enumerate(orig_paths):
                if orig_path == os.path.basename(orig_path) and not os.path.exists(orig_path):
                    ecs_to_find.append((idx, orig_path))
            _log.debug("List of easyconfig files to find: %s" % ecs_to_find)

            # find missing easyconfigs by walking paths with installed easyconfig files
            for path in easyconfigs_pkg_full_paths:
                _log.debug("Looking for missing easyconfig files (%d left) in %s..." % (len(ecs_to_find), path))
                for (subpath, dirnames, filenames) in os.walk(path, topdown=True):
                    for idx, orig_path in ecs_to_find[:]:
                        if orig_path in filenames:
                            full_path = os.path.join(subpath, orig_path)
                            _log.info("Found %s in %s: %s" % (orig_path, path, full_path))
                            orig_paths[idx] = full_path
                            # if file was found, stop looking for it (first hit wins)
                            ecs_to_find.remove((idx, orig_path))

                    # stop os.walk insanity as soon as we have all we need (os.walk loop)
                    if len(ecs_to_find) == 0:
                        break

                    # ignore subdirs specified to be ignored by replacing items in dirnames list used by os.walk
                    dirnames[:] = [d for d in dirnames if not d in options.ignore_dirs]

                # stop os.walk insanity as soon as we have all we need (paths loop)
                if len(ecs_to_find) == 0:
                    break

        # indicate that specified paths do not contain generated easyconfig files
        paths = [(path, False) for path in orig_paths]

    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        if paths:
            ec_paths = [path[0] for path in paths]
        else:
            # fallback: easybuild-easyconfigs install path
            ec_paths = easyconfigs_pkg_full_paths
        regtest_ok = regtest(ec_paths)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs = []
    for (path, generated) in paths:
        path = os.path.abspath(path)
        if not os.path.exists(path):
            print_error("Can't find path %s" % path)

        try:
            ec_files = find_easyconfigs(path, ignore_dirs=options.ignore_dirs)
            for ec_file in ec_files:
                # only pass build specs when not generating easyconfig files
                if try_to_generate:
                    ecs = process_easyconfig(ec_file)
                else:
                    ecs = process_easyconfig(ec_file, build_specs=build_specs)

                easyconfigs.extend(ecs)
        except IOError, err:
            _log.error("Processing easyconfigs in path %s failed: %s" % (path, err))
class EasyConfigTest(TestCase):
    """Baseclass for easyconfig testcases."""

    # NOTE(review): this class is truncated at the end of this source chunk
    # (inside check_python_packages); the remainder lives outside the visible region.

    # class-level statements below run once at import time, configuring EasyBuild
    # before any test executes

    # initialize configuration (required for e.g. default modules_tool setting)
    eb_go = eboptions.parse_options()
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'check_osdeps': False,
        'external_modules_metadata': {},
        'force': True,
        'local_var_naming_check': 'error',
        'optarch': 'test',
        'robot_path': get_paths_for("easyconfigs")[0],
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()
    del eb_go

    # put dummy 'craype-test' module in place, which is required for parsing easyconfigs using Cray* toolchains
    TMPDIR = tempfile.mkdtemp()
    os.environ['MODULEPATH'] = TMPDIR
    write_file(os.path.join(TMPDIR, 'craype-test'), '#%Module\n')

    log = fancylogger.getLogger("EasyConfigTest", fname=False)

    # make sure a logger is present for main
    eb_main._log = log

    # shared, lazily-populated class-level caches (see process_all_easyconfigs)
    ordered_specs = None
    parsed_easyconfigs = []

    def process_all_easyconfigs(self):
        """Process all easyconfigs and resolve inter-easyconfig dependencies."""
        # all available easyconfig files
        easyconfigs_path = get_paths_for("easyconfigs")[0]
        specs = glob.glob('%s/*/*/*.eb' % easyconfigs_path)

        # parse all easyconfigs if they haven't been already
        if not EasyConfigTest.parsed_easyconfigs:
            for spec in specs:
                EasyConfigTest.parsed_easyconfigs.extend(process_easyconfig(spec))

        # filter out external modules
        for ec in EasyConfigTest.parsed_easyconfigs:
            for dep in ec['dependencies'][:]:
                if dep.get('external_module', False):
                    ec['dependencies'].remove(dep)

        EasyConfigTest.ordered_specs = resolve_dependencies(EasyConfigTest.parsed_easyconfigs,
                                                            modules_tool(), retain_all_deps=True)

    def test_dep_graph(self):
        """Unit test that builds a full dependency graph."""
        # pygraph dependencies required for constructing dependency graph are not available prior to Python 2.6
        if LooseVersion(sys.version) >= LooseVersion('2.6') and single_tests_ok:
            # temporary file for dep graph
            (hn, fn) = tempfile.mkstemp(suffix='.dot')
            os.close(hn)

            if EasyConfigTest.ordered_specs is None:
                self.process_all_easyconfigs()

            dep_graph(fn, EasyConfigTest.ordered_specs)

            remove_file(fn)
        else:
            print("(skipped dep graph test)")

    def test_conflicts(self):
        """Check whether any conflicts occur in software dependency graphs."""
        if not single_tests_ok:
            print("(skipped conflicts test)")
            return

        if EasyConfigTest.ordered_specs is None:
            self.process_all_easyconfigs()

        self.assertFalse(check_conflicts(EasyConfigTest.ordered_specs, modules_tool(),
                                         check_inter_ec_conflicts=False), "No conflicts detected")

    def check_dep_vars(self, dep, dep_vars):
        """
        Check whether available variants of a particular dependency are acceptable or not.

        :param dep: dependency name
        :param dep_vars: dict mapping 'version: X; versionsuffix: Y' variant keys to
                         lists of easyconfig filenames using that variant;
                         NOTE: mutated in place by the filters below
        :return: True if the remaining set of variants is acceptable
        """
        # 'guilty' until proven 'innocent'
        res = False

        # filter out wrapped Java versions
        # i.e. if the version of one is a prefix of the version of the other one (e.g. 1.8 & 1.8.0_181)
        if dep == 'Java':
            dep_vars_to_check = sorted(dep_vars.keys())

            retained_dep_vars = []

            while dep_vars_to_check:
                dep_var = dep_vars_to_check.pop()
                dep_var_version = dep_var.split(';')[0]

                # remove dep vars wrapped by current dep var
                dep_vars_to_check = [x for x in dep_vars_to_check if not x.startswith(dep_var_version + '.')]

                retained_dep_vars = [x for x in retained_dep_vars if not x.startswith(dep_var_version + '.')]

                retained_dep_vars.append(dep_var)

            for key in list(dep_vars.keys()):
                if key not in retained_dep_vars:
                    del dep_vars[key]

        # filter out binutils with empty versionsuffix which is used to build toolchain compiler
        if dep == 'binutils' and len(dep_vars) > 1:
            empty_vsuff_vars = [v for v in dep_vars.keys() if v.endswith('versionsuffix: ')]
            if len(empty_vsuff_vars) == 1:
                dep_vars = dict((k, v) for (k, v) in dep_vars.items() if k != empty_vsuff_vars[0])

        # multiple variants of HTSlib is OK as long as they are deps for a matching version of BCFtools;
        # same goes for WRF and WPS
        for dep_name, parent_name in [('HTSlib', 'BCFtools'), ('WRF', 'WPS')]:
            if dep == dep_name and len(dep_vars) > 1:
                for key in list(dep_vars):
                    ecs = dep_vars[key]
                    # filter out dep variants that are only used as dependency for parent with same version
                    dep_ver = re.search('^version: (?P<ver>[^;]+);', key).group('ver')
                    if all(ec.startswith('%s-%s-' % (parent_name, dep_ver)) for ec in ecs) and len(dep_vars) > 1:
                        dep_vars.pop(key)

        # multiple versions of Boost is OK as long as they are deps for a matching Boost.Python
        if dep == 'Boost' and len(dep_vars) > 1:
            for key in list(dep_vars):
                ecs = dep_vars[key]
                # filter out Boost variants that are only used as dependency for Boost.Python with same version
                boost_ver = re.search('^version: (?P<ver>[^;]+);', key).group('ver')
                if all(ec.startswith('Boost.Python-%s-' % boost_ver) for ec in ecs):
                    dep_vars.pop(key)

        # filter out FFTW and imkl with -serial versionsuffix which are used in non-MPI subtoolchains
        if dep in ['FFTW', 'imkl']:
            serial_vsuff_vars = [v for v in dep_vars.keys() if v.endswith('versionsuffix: -serial')]
            if len(serial_vsuff_vars) == 1:
                dep_vars = dict((k, v) for (k, v) in dep_vars.items() if k != serial_vsuff_vars[0])

        # for some dependencies, we allow exceptions for software that depends on a particular version,
        # as long as that's indicated by the versionsuffix
        if dep in ['ASE', 'Boost', 'Java', 'Lua', 'PLUMED', 'R', 'TensorFlow'] and len(dep_vars) > 1:
            for key in list(dep_vars):
                dep_ver = re.search('^version: (?P<ver>[^;]+);', key).group('ver')
                # use version of Java wrapper rather than full Java version
                if dep == 'Java':
                    dep_ver = '.'.join(dep_ver.split('.')[:2])
                # filter out dep version if all easyconfig filenames using it include specific dep version
                if all(re.search('-%s-%s' % (dep, dep_ver), v) for v in dep_vars[key]):
                    dep_vars.pop(key)
                # always retain at least one dep variant
                if len(dep_vars) == 1:
                    break

        # filter R dep for a specific version of Python 2.x
        if dep == 'R' and len(dep_vars) > 1:
            for key in list(dep_vars):
                if '; versionsuffix: -Python-2' in key:
                    dep_vars.pop(key)
                # always retain at least one variant
                if len(dep_vars) == 1:
                    break

        # filter out variants that are specific to a particular version of CUDA
        cuda_dep_vars = [v for v in dep_vars.keys() if '-CUDA' in v]
        if len(dep_vars) > len(cuda_dep_vars):
            for key in list(dep_vars):
                if re.search('; versionsuffix: .*-CUDA-[0-9.]+', key):
                    dep_vars.pop(key)

        # some software packages require an old version of a particular dependency
        old_dep_versions = {
            # libxc 2.x or 3.x is required by ABINIT, AtomPAW, CP2K, GPAW, horton, PySCF, WIEN2k
            # (Qiskit depends on PySCF)
            'libxc': (r'[23]\.', ['ABINIT-', 'AtomPAW-', 'CP2K-', 'GPAW-', 'horton-',
                                  'PySCF-', 'Qiskit-', 'WIEN2k-']),
            # OPERA requires SAMtools 0.x
            'SAMtools': (r'0\.', ['ChimPipe-0.9.5', 'Cufflinks-2.2.1', 'OPERA-2.0.6']),
            # Kraken 1.x requires Jellyfish 1.x (Roary & metaWRAP depend on Kraken 1.x)
            'Jellyfish': (r'1\.', ['Kraken-1.', 'Roary-3.12.0', 'metaWRAP-1.2']),
            # EMAN2 2.3 requires Boost(.Python) 1.64.0
            'Boost': ('1.64.0;', ['Boost.Python-1.64.0-', 'EMAN2-2.3-']),
            'Boost.Python': ('1.64.0;', ['EMAN2-2.3-']),
        }
        if dep in old_dep_versions and len(dep_vars) > 1:
            for key in list(dep_vars):
                version_pattern, parents = old_dep_versions[dep]
                # filter out known old dependency versions
                if re.search('^version: %s' % version_pattern, key):
                    # only filter if the easyconfig using this dep variants is known
                    if all(any(x.startswith(p) for p in parents) for x in dep_vars[key]):
                        dep_vars.pop(key)

        # only single variant is always OK
        if len(dep_vars) == 1:
            res = True

        elif len(dep_vars) == 2 and dep in ['Python', 'Tkinter']:
            # for Python & Tkinter, it's OK to have on 2.x and one 3.x version
            v2_dep_vars = [x for x in dep_vars.keys() if x.startswith('version: 2.')]
            v3_dep_vars = [x for x in dep_vars.keys() if x.startswith('version: 3.')]
            if len(v2_dep_vars) == 1 and len(v3_dep_vars) == 1:
                res = True

        # two variants is OK if one is for Python 2.x and the other is for Python 3.x (based on versionsuffix)
        elif len(dep_vars) == 2:
            py2_dep_vars = [x for x in dep_vars.keys() if '; versionsuffix: -Python-2.' in x]
            py3_dep_vars = [x for x in dep_vars.keys() if '; versionsuffix: -Python-3.' in x]
            if len(py2_dep_vars) == 1 and len(py3_dep_vars) == 1:
                res = True

        return res

    def test_check_dep_vars(self):
        """Test check_dep_vars utility method."""

        # one single dep version: OK
        self.assertTrue(self.check_dep_vars('testdep', {
            'version: 1.2.3; versionsuffix:': ['foo-1.2.3.eb', 'bar-4.5.6.eb'],
        }))
        self.assertTrue(self.check_dep_vars('testdep', {
            'version: 1.2.3; versionsuffix: -test': ['foo-1.2.3.eb', 'bar-4.5.6.eb'],
        }))

        # two or more dep versions (no special case: not OK)
        self.assertFalse(self.check_dep_vars('testdep', {
            'version: 1.2.3; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 4.5.6; versionsuffix:': ['bar-4.5.6.eb'],
        }))
        self.assertFalse(self.check_dep_vars('testdep', {
            'version: 0.0; versionsuffix:': ['foobar-0.0.eb'],
            'version: 1.2.3; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 4.5.6; versionsuffix:': ['bar-4.5.6.eb'],
        }))

        # Java is a special case, with wrapped Java versions
        self.assertTrue(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
        }))

        # two Java wrappers is not OK
        self.assertFalse(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6.eb'],
        }))

        # OK to have two or more wrappers if versionsuffix is used to indicate exception
        self.assertTrue(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
        }))

        # versionsuffix must be there for all easyconfigs to indicate exception
        self.assertFalse(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6-Java-11.eb', 'bar-4.5.6.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6-Java-11.eb', 'bar-4.5.6.eb'],
        }))
        self.assertTrue(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
            'version: 12.1.6; versionsuffix:': ['foobar-0.0-Java-12.eb'],
            'version: 12; versionsuffix:': ['foobar-0.0-Java-12.eb'],
        }))

        # strange situation: odd number of Java versions
        # not OK: two Java wrappers (and no versionsuffix to indicate exception)
        self.assertFalse(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6.eb'],
        }))

        # OK because of -Java-11 versionsuffix
        self.assertTrue(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
        }))

        # not OK: two Java wrappers (and no versionsuffix to indicate exception)
        self.assertFalse(self.check_dep_vars('Java', {
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6.eb'],
        }))

        # OK because of -Java-11 versionsuffix
        self.assertTrue(self.check_dep_vars('Java', {
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
        }))

        # two different versions of Boost is not OK
        self.assertFalse(self.check_dep_vars('Boost', {
            'version: 1.64.0; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.70.0; versionsuffix:': ['foo-2.3.4.eb'],
        }))

        # a different Boost version that is only used as dependency for a matching Boost.Python is fine
        self.assertTrue(self.check_dep_vars('Boost', {
            'version: 1.64.0; versionsuffix:': ['Boost.Python-1.64.0-gompi-2019a.eb'],
            'version: 1.70.0; versionsuffix:': ['foo-2.3.4.eb'],
        }))
        self.assertTrue(self.check_dep_vars('Boost', {
            'version: 1.64.0; versionsuffix:': ['Boost.Python-1.64.0-gompi-2018b.eb'],
            'version: 1.66.0; versionsuffix:': ['Boost.Python-1.66.0-gompi-2019a.eb'],
            'version: 1.70.0; versionsuffix:': ['foo-2.3.4.eb'],
        }))
        self.assertFalse(self.check_dep_vars('Boost', {
            'version: 1.64.0; versionsuffix:': ['Boost.Python-1.64.0-gompi-2019a.eb'],
            'version: 1.66.0; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.70.0; versionsuffix:': ['foo-2.3.4.eb'],
        }))

        self.assertTrue(self.check_dep_vars('Boost', {
            'version: 1.63.0; versionsuffix: -Python-2.7.14': ['EMAN2-2.21a-foss-2018a-Python-2.7.14-Boost-1.63.0.eb'],
            'version: 1.64.0; versionsuffix:': ['Boost.Python-1.64.0-gompi-2018a.eb'],
            'version: 1.66.0; versionsuffix:': ['BLAST+-2.7.1-foss-2018a.eb'],
        }))

        self.assertTrue(self.check_dep_vars('Boost', {
            'version: 1.64.0; versionsuffix:': [
                'Boost.Python-1.64.0-gompi-2019a.eb',
                'EMAN2-2.3-foss-2019a-Python-2.7.15.eb',
            ],
            'version: 1.70.0; versionsuffix:': [
                'BLAST+-2.9.0-gompi-2019a.eb',
                'Boost.Python-1.70.0-gompi-2019a.eb',
            ],
        }))

    def test_dep_versions_per_toolchain_generation(self):
        """
        Check whether there's only one dependency version per toolchain generation actively used.
        This is enforced to try and limit the chance of running into conflicts when multiple modules built with
        the same toolchain are loaded together.
        """
        if EasyConfigTest.ordered_specs is None:
            self.process_all_easyconfigs()

        def get_deps_for(ec):
            """Get list of (direct) dependencies for specified easyconfig."""
            # recurses through the full (transitive) dependency chain, despite the
            # '(direct)' wording above
            deps = []
            for dep in ec['ec']['dependencies']:
                dep_mod_name = dep['full_mod_name']
                deps.append((dep['name'], dep['version'], dep['versionsuffix'], dep_mod_name))
                res = [x for x in EasyConfigTest.ordered_specs if x['full_mod_name'] == dep_mod_name]
                if len(res) == 1:
                    deps.extend(get_deps_for(res[0]))
                else:
                    raise EasyBuildError("Failed to find %s in ordered list of easyconfigs", dep_mod_name)

            return deps

        # some software also follows <year>{a,b} versioning scheme,
        # which throws off the pattern matching done below for toolchain versions
        false_positives_regex = re.compile('^MATLAB-Engine-20[0-9][0-9][ab]')

        # restrict to checking dependencies of easyconfigs using common toolchains (start with 2018a)
        # and GCCcore subtoolchain for common toolchains, starting with GCCcore 7.x
        for pattern in ['201[89][ab]', '20[2-9][0-9][ab]', 'GCCcore-[7-9]\.[0-9]']:
            all_deps = {}
            regex = re.compile('^.*-(?P<tc_gen>%s).*\.eb$' % pattern)

            # collect variants for all dependencies of easyconfigs that use a toolchain that matches
            for ec in EasyConfigTest.ordered_specs:
                ec_file = os.path.basename(ec['spec'])

                # take into account software which also follows a <year>{a,b} versioning scheme
                ec_file = false_positives_regex.sub('', ec_file)

                res = regex.match(ec_file)
                if res:
                    tc_gen = res.group('tc_gen')
                    all_deps_tc_gen = all_deps.setdefault(tc_gen, {})
                    for dep_name, dep_ver, dep_versuff, dep_mod_name in get_deps_for(ec):
                        dep_variants = all_deps_tc_gen.setdefault(dep_name, {})
                        # a variant is defined by version + versionsuffix
                        variant = "version: %s; versionsuffix: %s" % (dep_ver, dep_versuff)
                        # keep track of which easyconfig this is a dependency
                        dep_variants.setdefault(variant, set()).add(ec_file)

            # check which dependencies have more than 1 variant
            multi_dep_vars, multi_dep_vars_msg = [], ''
            for tc_gen in sorted(all_deps.keys()):
                for dep in sorted(all_deps[tc_gen].keys()):
                    dep_vars = all_deps[tc_gen][dep]
                    if not self.check_dep_vars(dep, dep_vars):
                        multi_dep_vars.append(dep)
                        multi_dep_vars_msg += "\nfound %s variants of '%s' dependency " % (len(dep_vars), dep)
                        multi_dep_vars_msg += "in easyconfigs using '%s' toolchain generation\n* " % tc_gen
                        multi_dep_vars_msg += '\n* '.join("%s as dep for %s" % v for v in sorted(dep_vars.items()))
                        multi_dep_vars_msg += '\n'

            error_msg = "No multi-variant deps found for '%s' easyconfigs:\n%s" % (regex.pattern, multi_dep_vars_msg)
            self.assertFalse(multi_dep_vars, error_msg)

    def test_sanity_check_paths(self):
        """Make sure specified sanity check paths adher to the requirements."""
        if EasyConfigTest.ordered_specs is None:
            self.process_all_easyconfigs()

        for ec in EasyConfigTest.parsed_easyconfigs:
            ec_scp = ec['ec']['sanity_check_paths']
            if ec_scp != {}:
                # if sanity_check_paths is specified (i.e., non-default), it must adher to the requirements
                # both 'files' and 'dirs' keys, both with list values and with at least one a non-empty list
                error_msg = "sanity_check_paths for %s does not meet requirements: %s" % (ec['spec'], ec_scp)
                self.assertEqual(sorted(ec_scp.keys()), ['dirs', 'files'], error_msg)
                self.assertTrue(isinstance(ec_scp['dirs'], list), error_msg)
                self.assertTrue(isinstance(ec_scp['files'], list), error_msg)
                self.assertTrue(ec_scp['dirs'] or ec_scp['files'], error_msg)

    def test_easyconfig_locations(self):
        """Make sure all easyconfigs files are in the right location."""
        easyconfig_dirs_regex = re.compile(r'/easybuild/easyconfigs/[0a-z]/[^/]+$')
        topdir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        for (dirpath, _, filenames) in os.walk(topdir):
            # ignore git/svn dirs & archived easyconfigs
            if '/.git/' in dirpath or '/.svn/' in dirpath or '__archive__' in dirpath:
                continue
            # check whether list of .eb files is non-empty
            easyconfig_files = [fn for fn in filenames if fn.endswith('eb')]
            if easyconfig_files:
                # check whether path matches required pattern
                if not easyconfig_dirs_regex.search(dirpath):
                    # only exception: TEMPLATE.eb
                    if not (dirpath.endswith('/easybuild/easyconfigs') and filenames == ['TEMPLATE.eb']):
                        self.assertTrue(False, "List of easyconfig files in %s is empty: %s" % (dirpath, filenames))

    def check_sha256_checksums(self, changed_ecs):
        """Make sure changed easyconfigs have SHA256 checksums in place."""
        # list of software for which checksums can not be required,
        # e.g. because 'source' files need to be constructed manually
        whitelist = ['Kent_tools-*', 'MATLAB-*', 'OCaml-*']

        # the check_sha256_checksums function (again) creates an EasyBlock instance
        # for easyconfigs using the Bundle easyblock, this is a problem because the 'sources' easyconfig parameter
        # is updated in place (sources for components are added the 'parent' sources) in Bundle's __init__;
        # therefore, we need to reset 'sources' to an empty list here if Bundle is used...
        # likewise for 'patches' and 'checksums'
        for ec in changed_ecs:
            if ec['easyblock'] == 'Bundle':
                ec['sources'] = []
                ec['patches'] = []
                ec['checksums'] = []

        # filter out deprecated easyconfigs
        retained_changed_ecs = []
        for ec in changed_ecs:
            if not ec['deprecated']:
                retained_changed_ecs.append(ec)

        checksum_issues = check_sha256_checksums(retained_changed_ecs, whitelist=whitelist)
        self.assertTrue(len(checksum_issues) == 0, "No checksum issues:\n%s" % '\n'.join(checksum_issues))

    def check_python_packages(self, changed_ecs, added_ecs_filenames):
        """Several checks for easyconfigs that install (bundles of) Python packages."""
        # These packages do not support installation with 'pip'
        whitelist_pip = [r'MATLAB-Engine-.*', r'PyTorch-.*', r'Meld-.*']

        failing_checks = []

        for ec in changed_ecs:
            ec_fn = os.path.basename(ec.path)
            easyblock = ec.get('easyblock')
            exts_defaultclass = ec.get('exts_defaultclass')
            exts_default_options = ec.get('exts_default_options', {})

            download_dep_fail = ec.get('download_dep_fail')
            exts_download_dep_fail = ec.get('exts_download_dep_fail')
            use_pip = ec.get('use_pip')

            # download_dep_fail should be set when using PythonPackage
            if easyblock == 'PythonPackage':
                if download_dep_fail is None:
                    failing_checks.append("'download_dep_fail' set in %s" % ec_fn)

            # use_pip should be set when using PythonPackage or PythonBundle (except for whitelisted easyconfigs)
            if easyblock in ['PythonBundle', 'PythonPackage']:
                if use_pip is None and not any(re.match(regex, ec_fn) for regex in whitelist_pip):
                    failing_checks.append("'use_pip' set in %s" % ec_fn)

            # download_dep_fail is enabled automatically in PythonBundle easyblock, so shouldn't be set
            if easyblock == 'PythonBundle':
                if download_dep_fail or exts_download_dep_fail:
                    fail = "'*download_dep_fail' set in %s (shouldn't, since PythonBundle easyblock is used)" % ec_fn
                    failing_checks.append(fail)

            elif exts_defaultclass == 'PythonPackage':
                # bundle of Python packages should use PythonBundle
                if easyblock == 'Bundle':
                    fail = "'PythonBundle' easyblock is used for bundle of Python packages in %s" % ec_fn
                    failing_checks.append(fail)
                else:
                    # both download_dep_fail and use_pip should be set via exts_default_options
                    # when installing Python packages as extensions
                    for key in ['download_dep_fail', 'use_pip']:
                        if exts_default_options.get(key) is None:
                            failing_checks.append("'%s' set in exts_default_options in %s" % (key, ec_fn))

            # if Python is a dependency, that should be reflected in the versionsuffix
            # Tkinter is an exception, since its version always matches the Python version anyway
            if any(dep['name'] == 'Python' for dep in ec['dependencies']) and ec.name != 'Tkinter':
                if not re.search(r'-Python-[23]\.[0-9]+\.[0-9]+', ec['versionsuffix']):
                    msg = "'-Python-%%(pyver)s' included in versionsuffix in %s" % ec_fn
                    # This is only a failure for newly added ECs, not for existing ECS
                    # As that would probably break many ECs
                    if ec_fn in added_ecs_filenames:
                        failing_checks.append(msg)
                    else:
                        print('\nNote: Failed non-critical check: ' + msg)

        # require that running of "pip check" during sanity check is
        # NOTE(review): method truncated at this point in this source chunk;
        # the remainder of check_python_packages is not visible here
enabled via sanity_pip_check if use_pip and easyblock in ['PythonBundle', 'PythonPackage']: sanity_pip_check = ec.get('sanity_pip_check') or exts_default_options.get('sanity_pip_check') if not sanity_pip_check and not any(re.match(regex, ec_fn) for regex in whitelist_pip): failing_checks.append("sanity_pip_check is enabled in %s" % ec_fn) self.assertFalse(failing_checks, '\n'.join(failing_checks)) def check_sanity_check_paths(self, changed_ecs): """Make sure a custom sanity_check_paths value is specified for easyconfigs that use a generic easyblock.""" # PythonBundle & PythonPackage already have a decent customised sanity_check_paths # BuildEnv, ModuleRC and Toolchain easyblocks doesn't install anything so there is nothing to check. whitelist = ['CrayToolchain', 'ModuleRC', 'PythonBundle', 'PythonPackage', 'Toolchain', 'BuildEnv'] # Autotools & (recent) GCC are just bundles (Autotools: Autoconf+Automake+libtool, GCC: GCCcore+binutils) bundles_whitelist = ['Autotools', 'GCC'] failing_checks = [] for ec in changed_ecs: easyblock = ec.get('easyblock') if is_generic_easyblock(easyblock) and not ec.get('sanity_check_paths'): if easyblock in whitelist or (easyblock == 'Bundle' and ec['name'] in bundles_whitelist): pass else: ec_fn = os.path.basename(ec.path) failing_checks.append("No custom sanity_check_paths found in %s" % ec_fn) self.assertFalse(failing_checks, '\n'.join(failing_checks)) def check_https(self, changed_ecs): """Make sure https:// URL is used (if it exists) for homepage/source_urls (rather than http://).""" whitelist = [ 'Kaiju', # invalid certificate at https://kaiju.binf.ku.dk 'libxml2', # https://xmlsoft.org works, but invalid certificate 'p4vasp', # https://www.p4vasp.at doesn't work 'ITSTool', # https://itstool.org/ doesn't work 'UCX-', # bad certificate for https://www.openucx.org ] http_regex = re.compile('http://[^"\'\n]+', re.M) failing_checks = [] for ec in changed_ecs: ec_fn = os.path.basename(ec.path) # skip whitelisted easyconfigs if 
any(ec_fn.startswith(x) for x in whitelist): continue # ignore commented out lines in easyconfig files when checking for http:// URLs ec_txt = '\n'.join(l for l in ec.rawtxt.split('\n') if not l.startswith('#')) for http_url in http_regex.findall(ec_txt): https_url = http_url.replace('http://', 'https://') try: https_url_works = bool(urlopen(https_url, timeout=5)) except Exception: https_url_works = False if https_url_works: failing_checks.append("Found http:// URL in %s, should be https:// : %s" % (ec_fn, http_url)) self.assertFalse(failing_checks, '\n'.join(failing_checks)) def test_changed_files_pull_request(self): """Specific checks only done for the (easyconfig) files that were changed in a pull request.""" def get_eb_files_from_diff(diff_filter): cmd = "git diff --name-only --diff-filter=%s %s...HEAD" % (diff_filter, target_branch) out, ec = run_cmd(cmd, simple=False) return [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')] # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR travis_pr_test = re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST', '(none)')) # when testing a PR in GitHub Actions, $GITHUB_EVENT_NAME will be set to 'pull_request' github_pr_test = os.environ.get('GITHUB_EVENT_NAME') == 'pull_request' if travis_pr_test or github_pr_test: # target branch should be anything other than 'master'; # usually is 'develop', but could also be a release branch like '3.7.x' if travis_pr_test: target_branch = os.environ.get('TRAVIS_BRANCH', None) else: target_branch = os.environ.get('GITHUB_BASE_REF', None) if target_branch is None: self.assertTrue(False, "Failed to determine target branch for current pull request.") if target_branch != 'master': if not EasyConfigTest.parsed_easyconfigs: self.process_all_easyconfigs() # relocate to top-level directory of repository to run 'git diff' command top_dir = os.path.dirname(os.path.dirname(get_paths_for('easyconfigs')[0])) cwd = change_dir(top_dir) # 
get list of changed easyconfigs changed_ecs_filenames = get_eb_files_from_diff(diff_filter='M') added_ecs_filenames = get_eb_files_from_diff(diff_filter='A') if changed_ecs_filenames: print("\nList of changed easyconfig files in this PR: %s" % '\n'.join(changed_ecs_filenames)) if added_ecs_filenames: print("\nList of added easyconfig files in this PR: %s" % '\n'.join(added_ecs_filenames)) change_dir(cwd) # grab parsed easyconfigs for changed easyconfig files changed_ecs = [] for ec_fn in changed_ecs_filenames + added_ecs_filenames: match = None for ec in EasyConfigTest.parsed_easyconfigs: if os.path.basename(ec['spec']) == ec_fn: match = ec['ec'] break if match: changed_ecs.append(match) else: # if no easyconfig is found, it's possible some archived easyconfigs were touched in the PR... # so as a last resort, try to find the easyconfig file in __archive__ easyconfigs_path = get_paths_for("easyconfigs")[0] specs = glob.glob('%s/__archive__/*/*/%s' % (easyconfigs_path, ec_fn)) if len(specs) == 1: ec = process_easyconfig(specs[0])[0] changed_ecs.append(ec['ec']) else: error_msg = "Failed to find parsed easyconfig for %s" % ec_fn error_msg += " (and could not isolate it in easyconfigs archive either)" self.assertTrue(False, error_msg) # run checks on changed easyconfigs self.check_sha256_checksums(changed_ecs) self.check_python_packages(changed_ecs, added_ecs_filenames) self.check_sanity_check_paths(changed_ecs) self.check_https(changed_ecs) def test_zzz_cleanup(self): """Dummy test to clean up global temporary directory.""" shutil.rmtree(self.TMPDIR)
def suite(): """Return all easyblock --module-only tests.""" def make_inner_test(easyblock, **kwargs): def innertest(self): template_module_only_test(self, easyblock, **kwargs) return innertest # initialize configuration (required for e.g. default modules_tool setting) cleanup() eb_go = eboptions.parse_options(args=['--prefix=%s' % TMPDIR]) config.init(eb_go.options, eb_go.get_options_by_section('config')) build_options = { 'external_modules_metadata': {}, # enable --force --module-only 'force': True, 'module_only': True, 'silent': True, 'suffix_modules_path': GENERAL_CLASS, 'valid_module_classes': config.module_classes(), 'valid_stops': [x[0] for x in EasyBlock.get_steps()], } config.init_build_options(build_options=build_options) set_tmpdir() # dynamically generate a separate test for each of the available easyblocks easyblocks_path = get_paths_for("easyblocks")[0] all_pys = glob.glob('%s/*/*.py' % easyblocks_path) easyblocks = [ eb for eb in all_pys if os.path.basename(eb) != '__init__.py' and '/test/' not in eb ] # filter out no longer supported easyblocks, or easyblocks that are tested in a different way excluded_easyblocks = ['versionindependendpythonpackage.py'] easyblocks = [ e for e in easyblocks if os.path.basename(e) not in excluded_easyblocks ] # add dummy PrgEnv-* modules, required for testing CrayToolchain easyblock for prgenv in ['PrgEnv-cray', 'PrgEnv-gnu', 'PrgEnv-intel', 'PrgEnv-pgi']: write_file(os.path.join(TMPDIR, 'modules', 'all', prgenv, '1.2.3'), "#%Module") # add foo/1.3.2.1.1 module, required for testing ModuleAlias easyblock write_file(os.path.join(TMPDIR, 'modules', 'all', 'foo', '1.2.3.4.5'), "#%Module") for easyblock in easyblocks: eb_fn = os.path.basename(easyblock) # dynamically define new inner functions that can be added as class methods to ModuleOnlyTest if eb_fn == 'systemcompiler.py': # use GCC as name when testing SystemCompiler easyblock innertest = make_inner_test(easyblock, name='GCC', version='system') elif eb_fn == 
'systemmpi.py': # use OpenMPI as name when testing SystemMPI easyblock innertest = make_inner_test(easyblock, name='OpenMPI', version='system') elif eb_fn == 'craytoolchain.py': # make sure that a (known) PrgEnv is included as a dependency extra_txt = 'dependencies = [("PrgEnv-gnu/1.2.3", EXTERNAL_MODULE)]' innertest = make_inner_test(easyblock, name='CrayCC', extra_txt=extra_txt) elif eb_fn == 'modulerc.py': # exactly one dependency is included with ModuleRC generic easyblock (and name must match) extra_txt = 'dependencies = [("foo", "1.2.3.4.5")]' innertest = make_inner_test(easyblock, name='foo', version='1.2.3.4', extra_txt=extra_txt) elif eb_fn == 'intel_compilers.py': # custom easyblock for intel-compilers (oneAPI) requires v2021.x or newer innertest = make_inner_test(easyblock, name='intel-compilers', version='2021.1') elif eb_fn == 'openssl_wrapper.py': # easyblock to create OpenSSL wrapper expects an OpenSSL version innertest = make_inner_test(easyblock, name='OpenSSL-wrapper', version='1.1') elif eb_fn == 'ucx_plugins.py': # install fake ucx_info command (used in make_module_extra) tmpdir = tempfile.mkdtemp() install_fake_command('ucx_info', FAKE_UCX_INFO, tmpdir) innertest = make_inner_test(easyblock, name='UCX-CUDA', tmpdir=tmpdir) else: # Make up some unique name innertest = make_inner_test(easyblock, name=eb_fn.replace('.', '-') + '-sw') innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock innertest.__name__ = "test_easyblock_%s" % '_'.join( easyblock.replace('.py', '').split('/')) setattr(ModuleOnlyTest, innertest.__name__, innertest) return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
def main(args=None, logfile=None, do_build=None, testing=False, modtool=None):
    """
    Main function: parse command line options, and act accordingly.
    :param args: command line arguments to use
    :param logfile: log file to use
    :param do_build: whether or not to actually perform the build
    :param testing: enable testing mode
    :param modtool: modules tool instance to use (created if None)
    """
    register_lock_cleanup_signal_handlers()

    # if $CDPATH is set, unset it, it'll only cause trouble...
    # see https://github.com/easybuilders/easybuild-framework/issues/2944
    if 'CDPATH' in os.environ:
        del os.environ['CDPATH']

    # purposely session state very early, to avoid modules loaded by EasyBuild meddling in
    init_session_state = session_state()
    eb_go, cfg_settings = set_up_configuration(args=args, logfile=logfile, testing=testing)
    options, orig_paths = eb_go.options, eb_go.args

    global _log
    (build_specs, _log, logfile, robot_path, search_query, eb_tmpdir, try_to_generate,
     tweaked_ecs_paths) = cfg_settings

    # load hook implementations (if any)
    hooks = load_hooks(options.hooks)

    run_hook(START, hooks)

    if modtool is None:
        modtool = modules_tool(testing=testing)

    # check whether any (EasyBuild-generated) modules are loaded already in the current session
    modtool.check_loaded_modules()

    if options.last_log:
        # print location to last log file, and exit
        last_log = find_last_log(logfile) or '(none)'
        print_msg(last_log, log=_log, prefix=False)

    # check whether packaging is supported when it's being used
    if options.package:
        check_pkg_support()
    else:
        _log.debug("Packaging not enabled, so not checking for packaging support.")

    # search for easyconfigs, if a query is specified
    if search_query:
        search_easyconfigs(search_query, short=options.search_short, filename_only=options.search_filename,
                           terse=options.terse)

    # GitHub options that warrant a silent cleanup & exit
    if options.check_github:
        check_github()

    elif options.install_github_token:
        install_github_token(options.github_user, silent=build_option('silent'))

    elif options.close_pr:
        close_pr(options.close_pr, motivation_msg=options.close_pr_msg)

    elif options.list_prs:
        print(list_prs(options.list_prs))

    elif options.merge_pr:
        merge_pr(options.merge_pr)

    elif options.review_pr:
        print(review_pr(pr=options.review_pr, colored=use_color(options.color)))

    elif options.list_installed_software:
        detailed = options.list_installed_software == 'detailed'
        print(list_software(output_format=options.output_format, detailed=detailed, only_installed=True))

    elif options.list_software:
        print(list_software(output_format=options.output_format, detailed=options.list_software == 'detailed'))

    elif options.create_index:
        print_msg("Creating index for %s..." % options.create_index, prefix=False)
        index_fp = dump_index(options.create_index, max_age_sec=options.index_max_age)
        index = load_index(options.create_index)
        print_msg("Index created at %s (%d files)" % (index_fp, len(index)), prefix=False)

    # non-verbose cleanup after handling GitHub integration stuff or printing terse info
    early_stop_options = [
        options.check_github,
        options.create_index,
        options.install_github_token,
        options.list_installed_software,
        options.list_software,
        options.close_pr,
        options.list_prs,
        options.merge_pr,
        options.review_pr,
        options.terse,
        search_query,
    ]
    if any(early_stop_options):
        clean_exit(logfile, eb_tmpdir, testing, silent=True)

    # update session state
    eb_config = eb_go.generate_cmd_line(add_default=True)
    modlist = modtool.list()  # build options must be initialized first before 'module list' works
    init_session_state.update({'easybuild_configuration': eb_config})
    init_session_state.update({'module_list': modlist})
    _log.debug("Initial session state: %s" % init_session_state)

    # determine easybuild-easyconfigs package install path
    easyconfigs_pkg_paths = get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR)
    if not easyconfigs_pkg_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    if options.install_latest_eb_release:
        if orig_paths:
            raise EasyBuildError("Installing the latest EasyBuild release can not be combined with installing "
                                 "other easyconfigs")
        else:
            eb_file = find_easybuild_easyconfig()
            orig_paths.append(eb_file)

    # NOTE(review): target_path stays unbound when orig_paths is empty;
    # the --copy-ec + tweaked easyconfigs branch below uses it — confirm that path can't be hit with no args
    if len(orig_paths) == 1:
        # if only one easyconfig file is specified, use current directory as target directory
        target_path = os.getcwd()
    elif orig_paths:
        # last path is target when --copy-ec is used, so remove that from the list
        target_path = orig_paths.pop() if options.copy_ec else None

    categorized_paths = categorize_files_by_type(orig_paths)

    # command line options that do not require any easyconfigs to be specified
    pr_options = options.new_branch_github or options.new_pr or options.new_pr_from_branch or options.preview_pr
    pr_options = pr_options or options.sync_branch_with_develop or options.sync_pr_with_develop
    pr_options = pr_options or options.update_branch_github or options.update_pr
    no_ec_opts = [options.aggregate_regtest, options.regtest, pr_options, search_query]

    # determine paths to easyconfigs
    determined_paths = det_easyconfig_paths(categorized_paths['easyconfigs'])

    if (options.copy_ec and not tweaked_ecs_paths) or options.fix_deprecated_easyconfigs or options.show_ec:
        if options.copy_ec:
            if len(determined_paths) == 1:
                copy_file(determined_paths[0], target_path)
                print_msg("%s copied to %s" % (os.path.basename(determined_paths[0]), target_path), prefix=False)
            elif len(determined_paths) > 1:
                copy_files(determined_paths, target_path)
                print_msg("%d file(s) copied to %s" % (len(determined_paths), target_path), prefix=False)
            else:
                raise EasyBuildError("One of more files to copy should be specified!")

        elif options.fix_deprecated_easyconfigs:
            fix_deprecated_easyconfigs(determined_paths)

        elif options.show_ec:
            for path in determined_paths:
                print_msg("Contents of %s:" % path)
                print_msg(read_file(path), prefix=False)

        clean_exit(logfile, eb_tmpdir, testing)

    if determined_paths:
        # transform paths into tuples, use 'False' to indicate the corresponding easyconfig files were not generated
        paths = [(p, False) for p in determined_paths]
    elif 'name' in build_specs:
        # try to obtain or generate an easyconfig file via build specifications if a software name is provided
        paths = find_easyconfigs_by_specs(build_specs, robot_path, try_to_generate, testing=testing)
    elif any(no_ec_opts):
        paths = determined_paths
    else:
        # NOTE(review): print_error with exit_on_error=not testing may return in testing mode,
        # leaving 'paths' unbound for the _log.debug call below — confirm acceptable
        print_error("Please provide one or multiple easyconfig files, or use software build " +
                    "options to make EasyBuild search for easyconfigs",
                    log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    _log.debug("Paths: %s", paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        # fallback: easybuild-easyconfigs install path
        regtest_ok = regtest([path[0] for path in paths] or easyconfigs_pkg_paths, modtool)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs, generated_ecs = parse_easyconfigs(paths, validate=not options.inject_checksums)

    # handle --check-contrib & --check-style options
    if run_contrib_style_checks([ec['ec'] for ec in easyconfigs], options.check_contrib, options.check_style):
        clean_exit(logfile, eb_tmpdir, testing)

    # verify easyconfig filenames, if desired
    if options.verify_easyconfig_filenames:
        _log.info("Verifying easyconfig filenames...")
        for easyconfig in easyconfigs:
            verify_easyconfig_filename(easyconfig['spec'], easyconfig['ec'], parsed_ec=easyconfig['ec'])

    # tweak obtained easyconfig files, if requested
    # don't try and tweak anything if easyconfigs were generated, since building a full dep graph will fail
    # if easyconfig files for the dependencies are not available
    if try_to_generate and build_specs and not generated_ecs:
        easyconfigs = tweak(easyconfigs, build_specs, modtool, targetdirs=tweaked_ecs_paths)

    if options.containerize:
        # if --containerize/-C create a container recipe (and optionally container image), and stop
        containerize(easyconfigs)
        clean_exit(logfile, eb_tmpdir, testing)

    forced = options.force or options.rebuild
    dry_run_mode = options.dry_run or options.dry_run_short or options.missing_modules

    # skip modules that are already installed unless forced, or unless an option is used that warrants not skipping
    if not (forced or dry_run_mode or options.extended_dry_run or pr_options or options.inject_checksums):
        retained_ecs = skip_available(easyconfigs, modtool)
        if not testing:
            for skipped_ec in [ec for ec in easyconfigs if ec not in retained_ecs]:
                print_msg("%s is already installed (module found), skipping" % skipped_ec['full_mod_name'])
        easyconfigs = retained_ecs

    # keep track for which easyconfigs we should set the corresponding module as default
    if options.set_default_module:
        for easyconfig in easyconfigs:
            easyconfig['ec'].set_default_module = True

    # determine an order that will allow all specs in the set to build
    if len(easyconfigs) > 0:
        # resolve dependencies if robot is enabled, except in dry run mode
        # one exception: deps *are* resolved with --new-pr or --update-pr when dry run mode is enabled
        if options.robot and (not dry_run_mode or pr_options):
            print_msg("resolving dependencies ...", log=_log, silent=testing)
            ordered_ecs = resolve_dependencies(easyconfigs, modtool)
        else:
            ordered_ecs = easyconfigs
    elif pr_options:
        ordered_ecs = None
    else:
        print_msg("No easyconfigs left to be built.", log=_log, silent=testing)
        ordered_ecs = []

    if options.copy_ec and tweaked_ecs_paths:
        all_specs = [spec['spec'] for spec in
                     resolve_dependencies(easyconfigs, modtool, retain_all_deps=True, raise_error_missing_ecs=False)]
        tweaked_ecs_in_all_ecs = [path for path in all_specs if
                                  any(tweaked_ecs_path in path for tweaked_ecs_path in tweaked_ecs_paths)]
        if tweaked_ecs_in_all_ecs:
            # Clean them, then copy them
            clean_up_easyconfigs(tweaked_ecs_in_all_ecs)
            copy_files(tweaked_ecs_in_all_ecs, target_path)
            print_msg("%d file(s) copied to %s" % (len(tweaked_ecs_in_all_ecs), target_path), prefix=False)

    # creating/updating PRs
    if pr_options:
        if options.new_pr:
            new_pr(categorized_paths, ordered_ecs)
        elif options.new_branch_github:
            new_branch_github(categorized_paths, ordered_ecs)
        elif options.new_pr_from_branch:
            new_pr_from_branch(options.new_pr_from_branch)
        elif options.preview_pr:
            print(review_pr(paths=determined_paths, colored=use_color(options.color)))
        elif options.sync_branch_with_develop:
            sync_branch_with_develop(options.sync_branch_with_develop)
        elif options.sync_pr_with_develop:
            sync_pr_with_develop(options.sync_pr_with_develop)
        elif options.update_branch_github:
            update_branch(options.update_branch_github, categorized_paths, ordered_ecs)
        elif options.update_pr:
            update_pr(options.update_pr, categorized_paths, ordered_ecs)
        else:
            raise EasyBuildError("Unknown PR option!")

    # dry_run: print all easyconfigs and dependencies, and whether they are already built
    elif dry_run_mode:
        if options.missing_modules:
            txt = missing_deps(easyconfigs, modtool)
        else:
            txt = dry_run(easyconfigs, modtool, short=not options.dry_run)
        print_msg(txt, log=_log, silent=testing, prefix=False)

    elif options.check_conflicts:
        if check_conflicts(easyconfigs, modtool):
            print_error("One or more conflicts detected!")
            sys.exit(1)
        else:
            print_msg("\nNo conflicts detected!\n", prefix=False)

    # dump source script to set up build environment
    elif options.dump_env_script:
        dump_env_script(easyconfigs)

    elif options.inject_checksums:
        inject_checksums(ordered_ecs, options.inject_checksums)

    # cleanup and exit after dry run, searching easyconfigs or submitting regression test
    stop_options = [options.check_conflicts, dry_run_mode, options.dump_env_script, options.inject_checksums]
    if any(no_ec_opts) or any(stop_options):
        clean_exit(logfile, eb_tmpdir, testing)

    # create dependency graph and exit
    if options.dep_graph:
        _log.info("Creating dependency graph %s" % options.dep_graph)
        dep_graph(options.dep_graph, ordered_ecs)
        clean_exit(logfile, eb_tmpdir, testing, silent=True)

    # submit build as job(s), clean up and exit
    if options.job:
        submit_jobs(ordered_ecs, eb_go.generate_cmd_line(), testing=testing)
        if not testing:
            print_msg("Submitted parallel build jobs, exiting now")
            clean_exit(logfile, eb_tmpdir, testing)

    # build software, will exit when errors occurs (except when testing)
    if not testing or (testing and do_build):
        exit_on_failure = not (options.dump_test_report or options.upload_test_report)
        ecs_with_res = build_and_install_software(ordered_ecs, init_session_state, exit_on_failure=exit_on_failure)
    else:
        ecs_with_res = [(ec, {}) for ec in ordered_ecs]

    correct_builds_cnt = len([ec_res for (_, ec_res) in ecs_with_res if ec_res.get('success', False)])
    overall_success = correct_builds_cnt == len(ordered_ecs)
    success_msg = "Build succeeded for %s out of %s" % (correct_builds_cnt, len(ordered_ecs))

    repo = init_repository(get_repository(), get_repositorypath())
    repo.cleanup()

    # dump/upload overall test report
    test_report_msg = overall_test_report(ecs_with_res, len(paths), overall_success, success_msg, init_session_state)
    if test_report_msg is not None:
        print_msg(test_report_msg)

    print_msg(success_msg, log=_log, silent=testing)

    # cleanup and spec files
    for ec in easyconfigs:
        if 'original_spec' in ec and os.path.isfile(ec['spec']):
            os.remove(ec['spec'])

    run_hook(END, hooks)

    # stop logging and cleanup tmp log file, unless one build failed (individual logs are located in eb_tmpdir)
    stop_logging(logfile, logtostdout=options.logtostdout)
    if overall_success:
        cleanup(logfile, eb_tmpdir, testing)
class EasyConfigTest(TestCase): """Baseclass for easyconfig testcases.""" # initialize configuration (required for e.g. default modules_tool setting) eb_go = eboptions.parse_options() config.init(eb_go.options, eb_go.get_options_by_section('config')) build_options = { 'check_osdeps': False, 'external_modules_metadata': {}, 'force': True, 'optarch': 'test', 'robot_path': get_paths_for("easyconfigs")[0], 'silent': True, 'suffix_modules_path': GENERAL_CLASS, 'valid_module_classes': config.module_classes(), 'valid_stops': [x[0] for x in EasyBlock.get_steps()], } config.init_build_options(build_options=build_options) set_tmpdir() del eb_go # put dummy 'craype-test' module in place, which is required for parsing easyconfigs using Cray* toolchains TMPDIR = tempfile.mkdtemp() os.environ['MODULEPATH'] = TMPDIR write_file(os.path.join(TMPDIR, 'craype-test'), '#%Module\n') log = fancylogger.getLogger("EasyConfigTest", fname=False) # make sure a logger is present for main main._log = log ordered_specs = None parsed_easyconfigs = [] def process_all_easyconfigs(self): """Process all easyconfigs and resolve inter-easyconfig dependencies.""" # all available easyconfig files easyconfigs_path = get_paths_for("easyconfigs")[0] specs = glob.glob('%s/*/*/*.eb' % easyconfigs_path) # parse all easyconfigs if they haven't been already if not self.parsed_easyconfigs: for spec in specs: self.parsed_easyconfigs.extend(process_easyconfig(spec)) # filter out external modules for ec in self.parsed_easyconfigs: for dep in ec['dependencies'][:]: if dep.get('external_module', False): ec['dependencies'].remove(dep) self.ordered_specs = resolve_dependencies(self.parsed_easyconfigs, retain_all_deps=True) def test_dep_graph(self): """Unit test that builds a full dependency graph.""" # pygraph dependencies required for constructing dependency graph are not available prior to Python 2.6 if LooseVersion( sys.version) >= LooseVersion('2.6') and single_tests_ok: # temporary file for dep graph (hn, fn) = 
tempfile.mkstemp(suffix='.dot') os.close(hn) if self.ordered_specs is None: self.process_all_easyconfigs() dep_graph(fn, self.ordered_specs) try: os.remove(fn) except OSError, err: log.error("Failed to remove %s: %s" % (fn, err)) else:
def main(testing_data=(None, None, None)):
    """
    Main function: parse command line options, and act accordingly.

    @param testing_data: tuple with command line arguments, log file and boolean indicating whether or not to build
    """
    # purposely session state very early, to avoid modules loaded by EasyBuild meddling in
    init_session_state = session_state()

    # steer behavior when testing main: testing mode is active iff command line args were passed in
    testing = testing_data[0] is not None
    args, logfile, do_build = testing_data

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # set umask (as early as possible); umask option value is an octal string
    if options.umask is not None:
        new_umask = int(options.umask, 8)
        old_umask = os.umask(new_umask)

    # set temporary directory to use
    eb_tmpdir = set_tmpdir(options.tmpdir)

    # initialise logging for main
    global _log
    _log, logfile = init_logging(logfile, logtostdout=options.logtostdout, testing=testing)

    # disallow running EasyBuild as root
    if os.getuid() == 0:
        raise EasyBuildError("You seem to be running EasyBuild with root privileges which is not wise, "
                             "so let's end this here.")

    # log startup info
    eb_cmd_line = eb_go.generate_cmd_line() + eb_go.args
    log_start(eb_cmd_line, eb_tmpdir)

    # new_umask/old_umask are only defined when the umask option was set (see above)
    if options.umask is not None:
        _log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    # determine robot path
    # --try-X, --dep-graph, --search use robot path for searching, so enable it with path of installed easyconfigs
    tweaked_ecs = try_to_generate and build_specs
    tweaked_ecs_path, pr_path = alt_easyconfig_paths(eb_tmpdir, tweaked_ecs=tweaked_ecs, from_pr=options.from_pr)
    auto_robot = try_to_generate or options.dep_graph or options.search or options.search_short
    robot_path = det_robot_path(options.robot_paths, tweaked_ecs_path, pr_path, auto_robot=auto_robot)
    _log.debug("Full robot path: %s" % robot_path)

    # configure & initialize build options
    config_options_dict = eb_go.get_options_by_section('config')
    build_options = {
        'build_specs': build_specs,
        'command_line': eb_cmd_line,
        'pr_path': pr_path,
        'robot_path': robot_path,
        'silent': testing,
        'try_to_generate': try_to_generate,
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }

    # initialise the EasyBuild configuration & build options
    config.init(options, config_options_dict)
    config.init_build_options(build_options=build_options, cmdline_options=options)

    # update session state
    eb_config = eb_go.generate_cmd_line(add_default=True)
    # build options must be initialized first before 'module list' works
    modlist = session_module_list(testing=testing)
    init_session_state.update({'easybuild_configuration': eb_config})
    init_session_state.update({'module_list': modlist})
    _log.debug("Initial session state: %s" % init_session_state)

    # search for easyconfigs, if a query is specified
    query = options.search or options.search_short
    if query:
        search_easyconfigs(query, short=not options.search)

    # determine easybuild-easyconfigs package install path
    easyconfigs_pkg_paths = get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR)
    if not easyconfigs_pkg_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    # determine paths to easyconfigs
    paths = det_easyconfig_paths(orig_paths)
    if paths:
        # transform paths into tuples, use 'False' to indicate the corresponding easyconfig files were not generated
        paths = [(p, False) for p in paths]
    else:
        if 'name' in build_specs:
            # try to obtain or generate an easyconfig file via build specifications if a software name is provided
            paths = find_easyconfigs_by_specs(build_specs, robot_path, try_to_generate, testing=testing)
        elif not any([options.aggregate_regtest, options.search, options.search_short, options.regtest]):
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        # fallback: easybuild-easyconfigs install path
        regtest_ok = regtest([path[0] for path in paths] or easyconfigs_pkg_paths)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs, generated_ecs = parse_easyconfigs(paths)

    # tweak obtained easyconfig files, if requested
    # don't try and tweak anything if easyconfigs were generated, since building a full dep graph will fail
    # if easyconfig files for the dependencies are not available
    if try_to_generate and build_specs and not generated_ecs:
        easyconfigs = tweak(easyconfigs, build_specs, targetdir=tweaked_ecs_path)

    # dry_run: print all easyconfigs and dependencies, and whether they are already built
    if options.dry_run or options.dry_run_short:
        txt = dry_run(easyconfigs, short=not options.dry_run, build_specs=build_specs)
        print_msg(txt, log=_log, silent=testing, prefix=False)

    # cleanup and exit after dry run, searching easyconfigs or submitting regression test
    if any([options.dry_run, options.dry_run_short, options.regtest, options.search, options.search_short]):
        cleanup(logfile, eb_tmpdir, testing)
        sys.exit(0)

    # skip modules that are already installed unless forced
    if not options.force:
        retained_ecs = skip_available(easyconfigs)
        if not testing:
            for skipped_ec in [ec for ec in easyconfigs if ec not in retained_ecs]:
                print_msg("%s is already installed (module found), skipping" % skipped_ec['full_mod_name'])
        easyconfigs = retained_ecs

    # determine an order that will allow all specs in the set to build
    if len(easyconfigs) > 0:
        if options.robot:
            print_msg("resolving dependencies ...", log=_log, silent=testing)
            ordered_ecs = resolve_dependencies(easyconfigs, build_specs=build_specs)
        else:
            ordered_ecs = easyconfigs
    else:
        print_msg("No easyconfigs left to be built.", log=_log, silent=testing)
        ordered_ecs = []

    # create dependency graph and exit
    if options.dep_graph:
        _log.info("Creating dependency graph %s" % options.dep_graph)
        dep_graph(options.dep_graph, ordered_ecs)
        sys.exit(0)

    # submit build as job(s), clean up and exit
    if options.job:
        job_info_txt = submit_jobs(ordered_ecs, eb_go.generate_cmd_line(), testing=testing)
        if not testing:
            print_msg("Submitted parallel build jobs, exiting now: %s" % job_info_txt)
            # NOTE(review): nesting of cleanup/exit under 'if not testing' is reconstructed
            # from collapsed source; in testing mode we fall through without exiting — confirm
            cleanup(logfile, eb_tmpdir, testing)
            sys.exit(0)

    # build software, will exit when errors occurs (except when testing)
    exit_on_failure = not options.dump_test_report and not options.upload_test_report
    if not testing or (testing and do_build):
        ecs_with_res = build_and_install_software(ordered_ecs, init_session_state, exit_on_failure=exit_on_failure)
    else:
        # testing mode without building: fake empty results per easyconfig
        ecs_with_res = [(ec, {}) for ec in ordered_ecs]

    correct_builds_cnt = len([ec_res for (_, ec_res) in ecs_with_res if ec_res.get('success', False)])
    overall_success = correct_builds_cnt == len(ordered_ecs)
    success_msg = "Build succeeded for %s out of %s" % (correct_builds_cnt, len(ordered_ecs))

    repo = init_repository(get_repository(), get_repositorypath())
    repo.cleanup()

    # dump/upload overall test report
    test_report_msg = overall_test_report(ecs_with_res, len(paths), overall_success, success_msg, init_session_state)
    if test_report_msg is not None:
        print_msg(test_report_msg)

    print_msg(success_msg, log=_log, silent=testing)

    # cleanup and spec files: remove easyconfig files that were generated (tweaked) on the fly
    for ec in easyconfigs:
        if 'original_spec' in ec and os.path.isfile(ec['spec']):
            os.remove(ec['spec'])

    # stop logging and cleanup tmp log file, unless one build failed (individual logs are located in eb_tmpdir path)
    stop_logging(logfile, logtostdout=options.logtostdout)
    if overall_success:
        cleanup(logfile, eb_tmpdir, testing)
def main(testing_data=(None, None, None)):
    """
    Main function:
    @arg options: a tuple: (options, paths, logger, logfile, hn) as defined in parse_options
    This function will:
    - read easyconfig
    - build software
    """
    # disallow running EasyBuild as root
    if os.getuid() == 0:
        sys.stderr.write("ERROR: You seem to be running EasyBuild with root privileges.\n"
                         "That's not wise, so let's end this here.\n"
                         "Exiting.\n")
        sys.exit(1)

    # steer behavior when testing main: testing mode is active iff command line args were passed in
    testing = testing_data[0] is not None
    args, logfile, do_build = testing_data

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # set umask (as early as possible); umask option value is an octal string
    if options.umask is not None:
        new_umask = int(options.umask, 8)
        old_umask = os.umask(new_umask)

    # set temporary directory to use
    eb_tmpdir = set_tmpdir(options.tmpdir)

    # initialise logging for main
    if options.logtostdout:
        fancylogger.logToScreen(enable=True, stdout=True)
    else:
        if logfile is None:
            # mkstemp returns (fd,filename), fd is from os.open, not regular open!
            fd, logfile = tempfile.mkstemp(suffix='.log', prefix='easybuild-')
            os.close(fd)
        fancylogger.logToFile(logfile)
        print_msg('temporary log file in case of crash %s' % (logfile), log=None, silent=testing)

    global _log
    _log = fancylogger.getLogger(fname=False)

    # new_umask/old_umask are only defined when the umask option was set (see above)
    if options.umask is not None:
        _log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))

    # hello world!
    _log.info(this_is_easybuild())

    # how was EB called?
    eb_command_line = eb_go.generate_cmd_line() + eb_go.args
    _log.info("Command line: %s" % (" ".join(eb_command_line)))

    _log.info("Using %s as temporary directory" % eb_tmpdir)

    if not options.robot is None:
        if options.robot:
            _log.info("Using robot path(s): %s" % options.robot)
        else:
            # NOTE: in this code vintage _log.error raises, so this aborts the run
            _log.error("No robot paths specified, and unable to determine easybuild-easyconfigs install path.")

    # do not pass options.robot, it's not a list instance (and it shouldn't be modified)
    robot_path = None
    if options.robot:
        robot_path = list(options.robot)

    # determine easybuild-easyconfigs package install path
    easyconfigs_paths = get_paths_for("easyconfigs", robot_path=robot_path)
    # keep track of paths for install easyconfigs, so we can obtain find specified easyconfigs
    easyconfigs_pkg_full_paths = easyconfigs_paths[:]
    if not easyconfigs_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    # specified robot paths are preferred over installed easyconfig files
    if robot_path:
        robot_path.extend(easyconfigs_paths)
        easyconfigs_paths = robot_path[:]
        _log.info("Extended list of robot paths with paths for installed easyconfigs: %s" % robot_path)

    # initialise the easybuild configuration
    config.init(options, eb_go.get_options_by_section('config'))

    # building a dependency graph implies force, so that all dependencies are retained
    # and also skips validation of easyconfigs (e.g. checking os dependencies)
    retain_all_deps = False
    if options.dep_graph:
        _log.info("Enabling force to generate dependency graph.")
        options.force = True
        retain_all_deps = True

    config.init_build_options({
        'aggregate_regtest': options.aggregate_regtest,
        'allow_modules_tool_mismatch': options.allow_modules_tool_mismatch,
        'check_osdeps': not options.ignore_osdeps,
        'command_line': eb_command_line,
        'debug': options.debug,
        'dry_run': options.dry_run,
        'easyblock': options.easyblock,
        'experimental': options.experimental,
        'force': options.force,
        'group': options.group,
        'ignore_dirs': options.ignore_dirs,
        'modules_footer': options.modules_footer,
        'only_blocks': options.only_blocks,
        'recursive_mod_unload': options.recursive_module_unload,
        'regtest_online': options.regtest_online,
        'regtest_output_dir': options.regtest_output_dir,
        'retain_all_deps': retain_all_deps,
        'robot_path': robot_path,
        'sequential': options.sequential,
        'silent': testing,
        'set_gid_bit': options.set_gid_bit,
        'skip': options.skip,
        'skip_test_cases': options.skip_test_cases,
        'sticky_bit': options.sticky_bit,
        'stop': options.stop,
        'umask': options.umask,
        'valid_module_classes': module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        'validate': not options.force,
    })

    # search for easyconfigs
    if options.search or options.search_short:
        search_path = [os.getcwd()]
        if easyconfigs_paths:
            search_path = easyconfigs_paths
        query = options.search or options.search_short
        ignore_dirs = config.build_option('ignore_dirs')
        silent = config.build_option('silent')
        search_file(search_path, query, short=not options.search, ignore_dirs=ignore_dirs, silent=silent)

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    paths = []
    if len(orig_paths) == 0:
        if 'name' in build_specs:
            # try to obtain an easyconfig file via the build specs, since a software name was provided
            paths = [obtain_path(build_specs, easyconfigs_paths, try_to_generate=try_to_generate,
                                 exit_on_error=not testing)]
        elif not any([options.aggregate_regtest, options.search, options.search_short, options.regtest]):
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    else:
        # look for easyconfigs with relative paths in easybuild-easyconfigs package,
        # unless they were found at the given relative paths
        if easyconfigs_pkg_full_paths:
            # determine which easyconfigs files need to be found, if any
            ecs_to_find = []
            for idx, orig_path in enumerate(orig_paths):
                # a bare filename that doesn't exist locally needs to be located in the package
                if orig_path == os.path.basename(orig_path) and not os.path.exists(orig_path):
                    ecs_to_find.append((idx, orig_path))
            _log.debug("List of easyconfig files to find: %s" % ecs_to_find)

            # find missing easyconfigs by walking paths with installed easyconfig files
            for path in easyconfigs_pkg_full_paths:
                _log.debug("Looking for missing easyconfig files (%d left) in %s..." % (len(ecs_to_find), path))
                for (subpath, dirnames, filenames) in os.walk(path, topdown=True):
                    # iterate over a copy since entries are removed while iterating
                    for idx, orig_path in ecs_to_find[:]:
                        if orig_path in filenames:
                            full_path = os.path.join(subpath, orig_path)
                            _log.info("Found %s in %s: %s" % (orig_path, path, full_path))
                            orig_paths[idx] = full_path
                            # if file was found, stop looking for it (first hit wins)
                            ecs_to_find.remove((idx, orig_path))

                    # stop os.walk insanity as soon as we have all we need (os.walk loop)
                    if len(ecs_to_find) == 0:
                        break

                    # ignore subdirs specified to be ignored by replacing items in dirnames list used by os.walk
                    dirnames[:] = [d for d in dirnames if not d in options.ignore_dirs]

                # stop os.walk insanity as soon as we have all we need (paths loop)
                if len(ecs_to_find) == 0:
                    break

        # indicate that specified paths do not contain generated easyconfig files
        paths = [(path, False) for path in orig_paths]

    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        if paths:
            ec_paths = [path[0] for path in paths]
        else:
            # fallback: easybuild-easyconfigs install path
            ec_paths = easyconfigs_pkg_full_paths
        regtest_ok = regtest(ec_paths)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs = []
    for (path, generated) in paths:
        path = os.path.abspath(path)
        if not os.path.exists(path):
            print_error("Can't find path %s" % path)

        try:
            files = find_easyconfigs(path, ignore_dirs=options.ignore_dirs)
            for f in files:
                # tweak the easyconfig on the fly when build specs were given (unless it was already generated)
                if not generated and try_to_generate and build_specs:
                    ec_file = tweak(f, None, build_specs)
                else:
                    ec_file = f
                ecs = process_easyconfig(ec_file, build_specs=build_specs)
                easyconfigs.extend(ecs)
        except IOError, err:
            # NOTE: in this code vintage _log.error raises, so this aborts the run
            _log.error("Processing easyconfigs in path %s failed: %s" % (path, err))
def main(args=None, logfile=None, do_build=None, testing=False, modtool=None):
    """
    Main function: parse command line options, and act accordingly.

    :param args: command line arguments to use
    :param logfile: log file to use
    :param do_build: whether or not to actually perform the build
    :param testing: enable testing mode
    :param modtool: modules tool instance to use (created via modules_tool() when None)
    """
    # purposely session state very early, to avoid modules loaded by EasyBuild meddling in
    init_session_state = session_state()

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # set umask (as early as possible); umask option value is an octal string
    if options.umask is not None:
        new_umask = int(options.umask, 8)
        old_umask = os.umask(new_umask)

    # set by option parsers via set_tmpdir
    eb_tmpdir = tempfile.gettempdir()

    search_query = options.search or options.search_filename or options.search_short

    # initialise logging for main
    global _log
    _log, logfile = init_logging(logfile, logtostdout=options.logtostdout,
                                 silent=(testing or options.terse or search_query), colorize=options.color)

    # disallow running EasyBuild as root
    if os.getuid() == 0:
        raise EasyBuildError("You seem to be running EasyBuild with root privileges which is not wise, "
                             "so let's end this here.")

    # log startup info
    eb_cmd_line = eb_go.generate_cmd_line() + eb_go.args
    log_start(eb_cmd_line, eb_tmpdir)

    # new_umask/old_umask are only defined when the umask option was set (see above)
    if options.umask is not None:
        _log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    # determine robot path
    # --try-X, --dep-graph, --search use robot path for searching, so enable it with path of installed easyconfigs
    tweaked_ecs = try_to_generate and build_specs
    tweaked_ecs_paths, pr_path = alt_easyconfig_paths(eb_tmpdir, tweaked_ecs=tweaked_ecs, from_pr=options.from_pr)
    auto_robot = try_to_generate or options.check_conflicts or options.dep_graph or search_query
    robot_path = det_robot_path(options.robot_paths, tweaked_ecs_paths, pr_path, auto_robot=auto_robot)
    _log.debug("Full robot path: %s" % robot_path)

    # configure & initialize build options
    config_options_dict = eb_go.get_options_by_section('config')
    build_options = {
        'build_specs': build_specs,
        'command_line': eb_cmd_line,
        'external_modules_metadata': parse_external_modules_metadata(options.external_modules_metadata),
        'pr_path': pr_path,
        'robot_path': robot_path,
        'silent': testing,
        'try_to_generate': try_to_generate,
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }

    # initialise the EasyBuild configuration & build options
    config.init(options, config_options_dict)
    config.init_build_options(build_options=build_options, cmdline_options=options)

    if modtool is None:
        modtool = modules_tool(testing=testing)

    if options.last_log:
        # print location to last log file, and exit
        last_log = find_last_log(logfile) or '(none)'
        print_msg(last_log, log=_log, prefix=False)

    # check whether packaging is supported when it's being used
    if options.package:
        check_pkg_support()
    else:
        _log.debug("Packaging not enabled, so not checking for packaging support.")

    # search for easyconfigs, if a query is specified
    if search_query:
        search_easyconfigs(search_query, short=options.search_short, filename_only=options.search_filename,
                           terse=options.terse)

    # GitHub options that warrant a silent cleanup & exit
    if options.check_github:
        check_github()

    elif options.install_github_token:
        install_github_token(options.github_user, silent=build_option('silent'))

    elif options.review_pr:
        print review_pr(options.review_pr, colored=use_color(options.color))

    elif options.list_installed_software:
        detailed = options.list_installed_software == 'detailed'
        print list_software(output_format=options.output_format, detailed=detailed, only_installed=True)

    elif options.list_software:
        print list_software(output_format=options.output_format, detailed=options.list_software == 'detailed')

    # non-verbose cleanup after handling GitHub integration stuff or printing terse info
    early_stop_options = [
        options.check_github,
        options.install_github_token,
        options.list_installed_software,
        options.list_software,
        options.review_pr,
        options.terse,
        search_query,
    ]
    if any(early_stop_options):
        cleanup(logfile, eb_tmpdir, testing, silent=True)
        sys.exit(0)

    # update session state
    eb_config = eb_go.generate_cmd_line(add_default=True)
    # build options must be initialized first before 'module list' works
    modlist = modtool.list()
    init_session_state.update({'easybuild_configuration': eb_config})
    init_session_state.update({'module_list': modlist})
    _log.debug("Initial session state: %s" % init_session_state)

    # determine easybuild-easyconfigs package install path
    easyconfigs_pkg_paths = get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR)
    if not easyconfigs_pkg_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    if options.install_latest_eb_release:
        if orig_paths:
            raise EasyBuildError("Installing the latest EasyBuild release can not be combined with installing "
                                 "other easyconfigs")
        else:
            eb_file = find_easybuild_easyconfig()
            orig_paths.append(eb_file)

    categorized_paths = categorize_files_by_type(orig_paths)

    # command line options that do not require any easyconfigs to be specified
    no_ec_opts = [options.aggregate_regtest, options.new_pr, options.regtest, options.update_pr, search_query]

    # determine paths to easyconfigs
    paths = det_easyconfig_paths(categorized_paths['easyconfigs'])
    if paths:
        # transform paths into tuples, use 'False' to indicate the corresponding easyconfig files were not generated
        paths = [(p, False) for p in paths]
    else:
        if 'name' in build_specs:
            # try to obtain or generate an easyconfig file via build specifications if a software name is provided
            paths = find_easyconfigs_by_specs(build_specs, robot_path, try_to_generate, testing=testing)
        elif not any(no_ec_opts):
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        # fallback: easybuild-easyconfigs install path
        regtest_ok = regtest([path[0] for path in paths] or easyconfigs_pkg_paths, modtool)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    if options.check_style:
        _log.debug("Running style check...")
        if cmdline_easyconfigs_style_check([path[0] for path in paths]):
            print_msg("All style checks passed!", prefix=False)
            cleanup(logfile, eb_tmpdir, testing)
            sys.exit(0)
        else:
            raise EasyBuildError("One or more style checks FAILED!")

    # read easyconfig files
    easyconfigs, generated_ecs = parse_easyconfigs(paths)

    # tweak obtained easyconfig files, if requested
    # don't try and tweak anything if easyconfigs were generated, since building a full dep graph will fail
    # if easyconfig files for the dependencies are not available
    if try_to_generate and build_specs and not generated_ecs:
        easyconfigs = tweak(easyconfigs, build_specs, modtool, targetdirs=tweaked_ecs_paths)

    dry_run_mode = options.dry_run or options.dry_run_short
    new_update_pr = options.new_pr or options.update_pr

    # skip modules that are already installed unless forced
    if not (options.force or options.rebuild or dry_run_mode or options.extended_dry_run or new_update_pr):
        retained_ecs = skip_available(easyconfigs, modtool)
        if not testing:
            for skipped_ec in [ec for ec in easyconfigs if ec not in retained_ecs]:
                print_msg("%s is already installed (module found), skipping" % skipped_ec['full_mod_name'])
        easyconfigs = retained_ecs

    # determine an order that will allow all specs in the set to build
    if len(easyconfigs) > 0:
        # resolve dependencies if robot is enabled, except in dry run mode
        # one exception: deps *are* resolved with --new-pr or --update-pr when dry run mode is enabled
        if options.robot and (not dry_run_mode or new_update_pr):
            print_msg("resolving dependencies ...", log=_log, silent=testing)
            ordered_ecs = resolve_dependencies(easyconfigs, modtool)
        else:
            ordered_ecs = easyconfigs
    elif new_update_pr:
        ordered_ecs = None
    else:
        print_msg("No easyconfigs left to be built.", log=_log, silent=testing)
        ordered_ecs = []

    # creating/updating PRs
    if new_update_pr:
        if options.new_pr:
            new_pr(categorized_paths, ordered_ecs, title=options.pr_title, descr=options.pr_descr,
                   commit_msg=options.pr_commit_msg)
        else:
            update_pr(options.update_pr, categorized_paths, ordered_ecs, commit_msg=options.pr_commit_msg)

        cleanup(logfile, eb_tmpdir, testing, silent=True)
        sys.exit(0)

    # dry_run: print all easyconfigs and dependencies, and whether they are already built
    elif dry_run_mode:
        txt = dry_run(easyconfigs, modtool, short=not options.dry_run)
        print_msg(txt, log=_log, silent=testing, prefix=False)

    elif options.check_conflicts:
        if check_conflicts(easyconfigs, modtool):
            print_error("One or more conflicts detected!")
            sys.exit(1)
        else:
            print_msg("\nNo conflicts detected!\n", prefix=False)

    # dump source script to set up build environment
    elif options.dump_env_script:
        dump_env_script(easyconfigs)

    # cleanup and exit after dry run, searching easyconfigs or submitting regression test
    if any(no_ec_opts + [options.check_conflicts, dry_run_mode, options.dump_env_script]):
        cleanup(logfile, eb_tmpdir, testing)
        sys.exit(0)

    # create dependency graph and exit
    if options.dep_graph:
        _log.info("Creating dependency graph %s" % options.dep_graph)
        dep_graph(options.dep_graph, ordered_ecs)
        cleanup(logfile, eb_tmpdir, testing, silent=True)
        sys.exit(0)

    # submit build as job(s), clean up and exit
    if options.job:
        submit_jobs(ordered_ecs, eb_go.generate_cmd_line(), testing=testing)
        if not testing:
            print_msg("Submitted parallel build jobs, exiting now")
            # NOTE(review): nesting of cleanup/exit under 'if not testing' is reconstructed
            # from collapsed source; in testing mode we fall through without exiting — confirm
            cleanup(logfile, eb_tmpdir, testing)
            sys.exit(0)

    # build software, will exit when errors occurs (except when testing)
    exit_on_failure = not options.dump_test_report and not options.upload_test_report
    if not testing or (testing and do_build):
        ecs_with_res = build_and_install_software(ordered_ecs, init_session_state, exit_on_failure=exit_on_failure)
    else:
        # testing mode without building: fake empty results per easyconfig
        ecs_with_res = [(ec, {}) for ec in ordered_ecs]

    correct_builds_cnt = len([ec_res for (_, ec_res) in ecs_with_res if ec_res.get('success', False)])
    overall_success = correct_builds_cnt == len(ordered_ecs)
    success_msg = "Build succeeded for %s out of %s" % (correct_builds_cnt, len(ordered_ecs))

    repo = init_repository(get_repository(), get_repositorypath())
    repo.cleanup()

    # dump/upload overall test report
    test_report_msg = overall_test_report(ecs_with_res, len(paths), overall_success, success_msg, init_session_state)
    if test_report_msg is not None:
        print_msg(test_report_msg)

    print_msg(success_msg, log=_log, silent=testing)

    # cleanup and spec files: remove easyconfig files that were generated (tweaked) on the fly
    for ec in easyconfigs:
        if 'original_spec' in ec and os.path.isfile(ec['spec']):
            os.remove(ec['spec'])

    # stop logging and cleanup tmp log file, unless one build failed (individual logs are located in eb_tmpdir)
    stop_logging(logfile, logtostdout=options.logtostdout)
    if overall_success:
        cleanup(logfile, eb_tmpdir, testing)