def jaotc_test(args):
    """run (acceptance) tests for the AOT compiler (jaotc)"""
    all_tests = ['HelloWorld', 'java.base', 'javac']
    parser = ArgumentParser(prog='mx jaotc-test')
    parser.add_argument("--list", default=None, action="store_true", help="Print the list of available jaotc tests.")
    parser.add_argument('tests', help='tests to run (omit to run all tests)', nargs=ZERO_OR_MORE)
    args = parser.parse_args(args)

    if args.list:
        # Use the function form of print (works on both Python 2 and 3,
        # unlike the statement form used originally).
        print("The following jaotc tests are available:\n")
        for name in all_tests:
            print(" " + name)
        return

    # No selection on the command line means "run everything".
    tests = args.tests or all_tests
    for test in tests:
        mx.log('Testing `{}`'.format(test))
        if test == 'HelloWorld':
            test_class(
                classpath=mx.classpath('JAOTC_TEST'),
                main_class='jdk.tools.jaotc.test.HelloWorld'
            )
        elif test == 'javac':
            test_javac('jdk.tools.jaotc')
        elif test == 'java.base':
            test_modules(
                classpath=mx.project('jdk.tools.jaotc.test').output_dir(),
                main_class='jdk.tools.jaotc.test.HelloWorld',
                modules=['java.base']
            )
        else:
            mx.abort('Unknown jaotc test: {}'.format(test))
def _find_matches(results, match_string, print_matches, list_file_matches, match_klass_name):
    """
    Scans the raw output of `results` for lines containing `match_string` and,
    for each hit, walks backwards to the enclosing 'installing *source* package'
    trace to attribute the hit to a package, recording it on a match-class
    instance (either `match_klass_name` looked up in this module, or
    `DefaultMatchClass`).
    """
    if match_klass_name:
        mod = sys.modules[__name__]
        # Fix: getattr without a default raises AttributeError, so the
        # 'no such function' guard below could never fire.
        match_klass = getattr(mod, match_klass_name, None)
        if not match_klass:
            mx.abort('no such function: ' + match_klass_name)
        match_klass_instance = match_klass(print_matches, list_file_matches, match_string)
    else:
        match_klass_instance = DefaultMatchClass(print_matches, list_file_matches, match_string)

    for result in results:
        lines = result.rawData.split("\n")
        i = 0
        for line in lines:
            if match_string in line:
                # search backward for package install trace
                j = i
                pkgname = None
                # Fix: include line 0 in the backward scan (was `j > 0`,
                # which silently skipped the first line of the output).
                while j >= 0:
                    linej = lines[j]
                    if 'installing *source* package' in linej:
                        # Fix: the package name lives in the installing
                        # trace line (linej), not in the matched line.
                        pkgname = _extract_pkgname(linej)
                        match_klass_instance.record(pkgname, line, str(result))
                        break
                    j = j - 1
                if pkgname is None:
                    print('failed to find installing trace starting at line: ' + str(i))
            i = i + 1
    match_klass_instance.output()
def _tck(args):
    """runs TCK tests"""

    parser = ArgumentParser(prog="mx tck", description="run the TCK tests", formatter_class=RawDescriptionHelpFormatter, epilog=_debuggertestHelpSuffix)
    parser.add_argument("--tck-configuration", help="TCK configuration", choices=["compile", "debugger", "default"], default="default")
    parsed_args, args = parser.parse_known_args(args)
    tckConfiguration = parsed_args.tck_configuration
    # Split the trailing run of non-option arguments off as test names:
    # scan from the right until the first argument starting with '-'.
    index = len(args)
    for arg in reversed(args):
        if arg.startswith("-"):
            break
        index = index - 1
    args_no_tests = args[0:index]
    tests = args[index:len(args)]
    if len(tests) == 0:
        tests = ["com.oracle.truffle.tck.tests"]
    # Within the remaining arguments, everything from the last '--'-prefixed
    # argument onward is treated as JVM options; what precedes the separator
    # (the element at index-1 is dropped) goes to the unit test harness.
    index = len(args_no_tests)
    for arg in reversed(args_no_tests):
        if arg.startswith("--"):
            break
        index = index - 1
    unitTestOptions = args_no_tests[0:max(index-1, 0)]
    jvmOptions = args_no_tests[index:len(args_no_tests)]
    if tckConfiguration == "default":
        tckConfiguration == "default"  # NOTE(review): plain unittest run
        unittest(unitTestOptions + ["--"] + jvmOptions + tests)
    elif tckConfiguration == "debugger":
        # Run under the debugger harness, logging to a temp 'debugalot' file.
        with mx.SafeFileCreation(os.path.join(tempfile.gettempdir(), "debugalot")) as sfc:
            _execute_debugger_test(tests, sfc.tmpPath, False, unitTestOptions, jvmOptions)
    elif tckConfiguration == "compile":
        if not _is_graalvm(mx.get_jdk()):
            mx.abort("The 'compile' TCK configuration requires graalvm execution, run with --java-home=<path_to_graalvm>.")
        # Force immediate Truffle compilation so compilation failures surface as errors.
        unittest(unitTestOptions + ["--"] + jvmOptions + ["-Dgraal.TruffleCompileImmediately=true", "-Dgraal.TruffleCompilationExceptionsAreThrown=true"] + tests)
def cc(args):
    """Compile C sources for FastR.

    When sulong is available the platform compiler is driven through sulong
    (additionally emitting, optimizing and embedding LLVM IR for non-link
    compilations); otherwise the plain system gcc/clang is invoked.
    Returns the compiler's exit code.
    """
    _log('fastr:cc', args)
    sulong = _sulong()
    if sulong:
        analyzed_args = _analyze_args(args)
        # Select the sulong-wrapped platform compiler.
        if _is_linux():
            compile_fn = sulong.compileWithGCC
        elif _is_darwin():
            compile_fn = sulong.compileWithClang
        else:
            mx.abort('unsupported platform')
        rc = compile_fn(analyzed_args.compile_args)
        if rc == 0 and analyzed_args.llvm_ir_file and not analyzed_args.is_link:
            rc = compile_fn(analyzed_args.emit_llvm_args)
        if rc == 0 and not analyzed_args.is_link and analyzed_args.llvm_ir_file:
            # Run mem2reg on the emitted IR, then embed it into the object file.
            rc = _mem2reg_opt(analyzed_args.llvm_ir_file)
            if rc == 0:
                rc = _embed_ir(analyzed_args.llvm_ir_file)
    else:
        if _is_linux():
            native_cc = 'gcc'
        elif _is_darwin():
            native_cc = 'clang'
        else:
            mx.abort('unsupported platform')
        rc = mx.run([native_cc] + args, nonZeroIsFatal=False)
    return rc
def get_java_module_info(dist, fatalIfNotModule=False):
    """
    Gets the metadata for the module derived from `dist`.

    :param JARDistribution dist: a distribution possibly defining a module
    :param bool fatalIfNotModule: specifies whether to abort if `dist` does not define a module
    :return: None if `dist` does not define a module, otherwise a tuple of the
             module name, the directory in which the module's class files
             (including module-info.class) are staged, and the path of the jar
             file containing the built module
    """
    if dist.suite.getMxCompatibility().moduleDepsEqualDistDeps():
        moduleName = getattr(dist, 'moduleName', None)
        if not moduleName:
            if fatalIfNotModule:
                mx.abort('Distribution ' + dist.name + ' does not define a module')
            return None
        assert len(moduleName) > 0, '"moduleName" attribute of distribution ' + dist.name + ' cannot be empty'
    else:
        if not get_module_deps(dist):
            if fatalIfNotModule:
                mx.abort('Module for distribution ' + dist.name + ' would be empty')
            return None
        # Derive the module name from the distribution name.
        moduleName = dist.name.replace('_', '.').lower()

    staging_root = mx.ensure_dir_exists(join(dist.suite.get_output_root(), 'modules'))
    module_dir = mx.ensure_dir_exists(join(staging_root, moduleName))
    module_jar = join(staging_root, moduleName + '.jar')
    return moduleName, module_dir, module_jar
def __init__(self, title, tasks=None, disableJacoco=False):
    # Create a gate task. `tasks` is the list of tasks run so far (None when
    # constructing a placeholder); the filter state lives on the Task class
    # (startAtFilter / filters / filtersExclude) and decides whether this
    # task is skipped. Timing starts only for tasks that will actually run.
    self.tasks = tasks
    self.title = title
    self.skipped = False
    if tasks is not None:
        # Duplicate titles would make start-at/filter matching ambiguous.
        for t in tasks:
            if t.title == title:
                mx.abort('Gate task with title "' + title + '" is already defined')
        if Task.startAtFilter:
            assert not Task.filters
            # Skip everything until the first title matching the start filter;
            # clearing the filter makes all subsequent tasks run.
            if Task.startAtFilter in title:
                self.skipped = False
                Task.startAtFilter = None
            else:
                self.skipped = True
        elif Task.filters:
            # filtersExclude inverts the match: filters name tasks to skip
            # rather than tasks to run.
            if Task.filtersExclude:
                self.skipped = any([f in title for f in Task.filters])
            else:
                self.skipped = not any([f in title for f in Task.filters])
        if not self.skipped:
            # start/end/duration (and disableJacoco) are only defined for
            # tasks that actually run.
            self.start = time.time()
            self.end = None
            self.duration = None
            self.disableJacoco = disableJacoco
            mx.log(time.strftime('gate: %d %b %Y %H:%M:%S: BEGIN: ') + title)
def jacocoreport(args):
    """create a JaCoCo coverage report

    Creates the report from the 'jacoco.exec' file in the current directory.
    Default output directory is 'coverage', but an alternative can be provided as an argument."""
    jacocoreport = mx.library("JACOCOREPORT", True)
    if len(args) > 1:
        mx.abort('jacocoreport takes only one argument : an output directory')
    out = args[0] if len(args) == 1 else 'coverage'

    # Projects opt in via their 'jacoco' attribute: 'include' (or unset)
    # adds the project name to the include list.
    includes = list(_jacoco_includes)
    for p in mx.projects():
        if getattr(p, 'jacoco', '') in ('include', ''):
            includes.append(p.name)

    # Collect the directories of non-excluded projects whose path mentions
    # any included name.
    includedirs = set()
    for p in mx.projects():
        if getattr(p, 'jacoco', '') == 'exclude':
            continue
        if any(include in p.dir for include in includes):
            includedirs.add(p.dir)

    # The report tool expects a bin directory under each analyzed project.
    for projdir in sorted(includedirs):
        mx.ensure_dir_exists(projdir + '/bin')
    mx.run_java(['-jar', jacocoreport.get_path(True), '--in', 'jacoco.exec', '--out', out] + sorted(includedirs))
def verify_jvmci_ci_versions(args=None, extraVMarguments=None):
    """
    Checks that the JVMCI version declared in the ci*.hocon files agrees with
    the one in .travis.yml, aborting with a diagnostic if they differ.
    """
    version_pattern = re.compile(r'^(?!\s*#).*jvmci-(?P<version>\d*\.\d*)')

    def _grep_version(files, msg):
        # Extract the single JVMCI version mentioned in `files`; abort if
        # the files disagree or no version is found.
        version = None
        last = None
        for filename in files:
            # Fix: the original kept one cumulative, post-incremented counter
            # across all files, so the line number in the abort message was
            # wrong. Count per file, 1-based, and close the file handle.
            with open(filename) as f:
                for linenr, line in enumerate(f, start=1):
                    m = version_pattern.search(line)
                    if m:
                        new_version = m.group('version')
                        if version and version != new_version:
                            mx.abort(
                                os.linesep.join([
                                    "Multiple JVMCI versions found in {0} files:".format(msg),
                                    " {0} in {1}:{2}: {3}".format(version, *last),
                                    " {0} in {1}:{2}: {3}".format(new_version, filename, linenr, line),
                                ]))
                        last = (filename, linenr, line.rstrip())
                        version = new_version
        if not version:
            mx.abort("No JVMCI version found in {0} files!".format(msg))
        return version

    hocon_version = _grep_version(glob.glob(join(mx.primary_suite().dir, 'ci*.hocon')) + glob.glob(join(mx.primary_suite().dir, 'ci*/*.hocon')), 'ci.hocon')
    travis_version = _grep_version(glob.glob('.travis.yml'), 'TravisCI')
    if hocon_version != travis_version:
        mx.abort("Travis and ci.hocon JVMCI versions do not match: {0} vs. {1}".format(travis_version, hocon_version))
    mx.log('JVMCI versions are ok!')
def _junit_r_harness(args, vmArgs, junitArgs):
    # Builds the custom FastR JUnit RunListener argument string from the parsed
    # options, then launches the unit tests via mx.run_java. Returns the
    # process exit code (nonZeroIsFatal=False).

    # always pass the directory where the expected output file should reside
    runlistener_arg = 'expected=' + _test_srcdir()

    # there should not be any unparsed arguments at this stage
    if args.remainder:
        mx.abort('unexpected arguments: ' + str(args.remainder).strip('[]') + '; did you forget --tests')

    def add_arg_separator():
        # can't update in Python 2.7: closures cannot rebind the outer
        # variable, so return the (possibly comma-terminated) value and let
        # the caller reassign runlistener_arg.
        arg = runlistener_arg
        if len(arg) > 0:
            arg += ','
        return arg

    if args.gen_fastr_output:
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'gen-fastr=' + args.gen_fastr_output

    if args.check_expected_output:
        # checking implies generating the expected output first
        args.gen_expected_output = True
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'check-expected'

    if args.gen_expected_output:
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'gen-expected'

    if args.keep_trailing_whitespace:
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'keep-trailing-whitespace'

    if args.gen_expected_quiet:
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'gen-expected-quiet'

    if args.gen_diff_output:
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'gen-diff=' + args.gen_diff_output

#    if args.test_methods:
#        runlistener_arg = add_arg_separator()
#        runlistener_arg = 'test-methods=' + args.test_methods

    # use a custom junit.RunListener
    runlistener = 'com.oracle.truffle.r.test.TestBase$RunListener'
    if len(runlistener_arg) > 0:
        runlistener += ':' + runlistener_arg

    junitArgs += ['--runlistener', runlistener]

    # on some systems a large Java stack seems necessary
    vmArgs += ['-Xss12m']

    vmArgs += _graal_options(nocompile=True)

    setREnvironment()
    jdk = args.jdk
    if not jdk:
        jdk = get_default_jdk()

    vmArgs = _sanitize_vmArgs(jdk, vmArgs)
    return mx.run_java(vmArgs + junitArgs, nonZeroIsFatal=False, jdk=jdk)
def getDacapo(name, dacapoArgs=None, extraVmArguments=None):
    # Builds a Test descriptor for the DaCapo 9.12 benchmark `name`, locating
    # the benchmark jar via the DACAPO_CP environment variable or the DACAPO
    # mx library.
    dacapo = mx.get_env('DACAPO_CP')
    if dacapo is None:
        l = mx.library('DACAPO', False)
        if l is not None:
            dacapo = l.get_path(True)
        else:
            mx.abort('DaCapo 9.12 jar file must be specified with DACAPO_CP environment variable or as DACAPO library')

    if not isfile(dacapo) or not dacapo.endswith('.jar'):
        mx.abort('Specified DaCapo jar file does not exist or is not a jar file: ' + dacapo)

    # Pass/fail patterns matched against the benchmark output, and score
    # extractors for the timed run and the first (warmup) run.
    dacapoSuccess = re.compile(r"^===== DaCapo 9\.12 ([a-zA-Z0-9_]+) PASSED in ([0-9]+) msec =====", re.MULTILINE)
    dacapoFail = re.compile(r"^===== DaCapo 9\.12 ([a-zA-Z0-9_]+) FAILED (warmup|) =====", re.MULTILINE)
    dacapoTime = re.compile(r"===== DaCapo 9\.12 (?P<benchmark>[a-zA-Z0-9_]+) PASSED in (?P<time>[0-9]+) msec =====")
    dacapoTime1 = re.compile(r"===== DaCapo 9\.12 (?P<benchmark>[a-zA-Z0-9_]+) completed warmup 1 in (?P<time>[0-9]+) msec =====")

    dacapoMatcher = ValuesMatcher(dacapoTime, {'group' : 'DaCapo', 'name' : '<benchmark>', 'score' : '<time>'})
    dacapoMatcher1 = ValuesMatcher(dacapoTime1, {'group' : 'DaCapo-1stRun', 'name' : '<benchmark>', 'score' : '<time>'})

    # Use ipv4 stack for dacapos; tomcat+solaris+ipv6_interface fails (see also: JDK-8072384)
    # NOTE(review): `gc` is a free variable here — presumably a module-level
    # GC-flag string; confirm it is defined in this file's globals.
    return Test("DaCapo-" + name, ['-jar', mx._cygpathU2W(dacapo), name] + _noneAsEmptyList(dacapoArgs), [dacapoSuccess], [dacapoFail],
                [dacapoMatcher, dacapoMatcher1],
                ['-Xms2g', '-XX:+' + gc, '-XX:-UseCompressedOops', "-Djava.net.preferIPv4Stack=true", '-G:+ExitVMOnException'] +
                _noneAsEmptyList(extraVmArguments))
def __init__(self, name, args, tags, suppress=None):
    """Create a gate task descriptor.

    :param name: task name
    :param args: arguments forwarded to the task
    :param tags: optional list of string tags selecting when the task runs
    :param suppress: optional suppression marker
    """
    self.name = name
    self.args = args
    self.suppress = suppress
    self.tags = tags
    # Fix: use any() — the original all(not isinstance(...)) only aborted
    # when *every* tag was a non-string, so a mixed list such as
    # ['x', 1] slipped through the validation.
    if tags is not None and (type(tags) is not list or any(not isinstance(x, basestring) for x in tags)):
        mx.abort("Gate tag argument must be a list of strings, tag argument:" + str(tags))
def get_module_deps(dist):
    """
    Gets the JAR distributions and their constituent Java projects whose artifacts (i.e., class files
    and resources) are the input to the Java module jar created by `make_java_module` for a given distribution.

    :return: the set of `JARDistribution` objects and their constituent `JavaProject` transitive
             dependencies denoted by the ``moduledeps`` attribute
    """
    if dist.suite.getMxCompatibility().moduleDepsEqualDistDeps():
        # Newer suites: the module contents are simply the archived deps.
        return dist.archived_deps()

    # Older suites: walk the 'moduledeps' roots once and cache the result on
    # the distribution under '.module_deps' (the leading dot keeps the cache
    # attribute from clashing with declared suite attributes).
    if not hasattr(dist, '.module_deps'):
        roots = getattr(dist, 'moduledeps', [])
        if not roots:
            return roots
        for root in roots:
            if not root.isJARDistribution():
                mx.abort('moduledeps can (currently) only include JAR distributions: ' + str(root), context=dist)

        moduledeps = []

        def _visit(dep, edges):
            # Collect JAR distributions and Java projects; anything else
            # (other than dist itself) is not allowed in a module.
            if dep is not dist:
                if dep.isJavaProject() or dep.isJARDistribution():
                    if dep not in moduledeps:
                        moduledeps.append(dep)
                else:
                    mx.abort('modules can (currently) only include JAR distributions and Java projects: ' + str(dep), context=dist)

        def _preVisit(dst, edge):
            # JRE/JDK libraries are provided by the platform, not the module.
            return not dst.isJreLibrary() and not dst.isJdkLibrary()

        mx.walk_deps(roots, preVisit=_preVisit, visit=_visit)
        setattr(dist, '.module_deps', moduledeps)
    return getattr(dist, '.module_deps')
def run_vm(args, vm=None, nonZeroIsFatal=True, out=None, err=None, cwd=None, timeout=None, debugLevel=None, vmbuild=None):
    """run a Java program by executing the java executable in a JVMCI JDK"""
    selected_tag = mx.get_jdk_option().tag
    if selected_tag and selected_tag != _JVMCI_JDK_TAG:
        mx.abort('The "--jdk" option must have the tag "' + _JVMCI_JDK_TAG + '" when running a command requiring a JVMCI VM')
    # A legacy vmbuild value only applies when no explicit debug level is given.
    effective_level = debugLevel or _translateLegacyDebugLevel(vmbuild)
    return get_jvmci_jdk(debugLevel=effective_level).run_java(
        args, nonZeroIsFatal=nonZeroIsFatal, out=out, err=err, cwd=cwd, timeout=timeout)
def _grep_version(files, msg):
    """Extracts the JVMCI version (and its -dev flag) that all `files` agree on.

    Aborts when the files disagree or no version is found.
    :return: a (version, dev) tuple
    """
    version = None
    dev = None
    last = None
    for filename in files:
        # Fix: the original kept one cumulative, post-incremented counter
        # across all files, so the line number in the abort message was
        # wrong. Count per file, 1-based, and close the file handle.
        with open(filename) as f:
            for linenr, line in enumerate(f, start=1):
                m = version_pattern.search(line)
                if m:
                    new_version = m.group('version')
                    new_dev = bool(m.group('dev'))
                    if (version and version != new_version) or (dev is not None and dev != new_dev):
                        mx.abort(
                            os.linesep.join([
                                "Multiple JVMCI versions found in {0} files:".format(msg),
                                " {0} in {1}:{2}: {3}".format(version + ('-dev' if dev else ''), *last),
                                " {0} in {1}:{2}: {3}".format(new_version + ('-dev' if new_dev else ''), filename, linenr, line),
                            ]))
                    last = (filename, linenr, line.rstrip())
                    version = new_version
                    dev = new_dev
    if not version:
        mx.abort("No JVMCI version found in {0} files!".format(msg))
    return version, dev
def _visit(dep, edges):
    # Dependency-walk callback: collect each JAR distribution or Java project
    # (other than the distribution itself) exactly once; anything else cannot
    # be part of a module.
    if dep is dist:
        return
    if not (dep.isJavaProject() or dep.isJARDistribution()):
        mx.abort('modules can (currently) only include JAR distributions and Java projects: ' + str(dep), context=dist)
    if dep not in moduledeps:
        moduledeps.append(dep)
def js_image_test(binary, bench_location, name, warmup_iterations, iterations, timeout=None, bin_args=None):
    # Runs the JS benchmark `name` under the given native image `binary` via
    # harness.js and aborts unless the output contains at least one
    # "<name>: <number>" score line.
    bin_args = bin_args if bin_args is not None else []
    jsruncmd = [binary] + bin_args + [join(bench_location, 'harness.js'), '--', join(bench_location, name + '.js'),
                                      '--', '--warmup-iterations=' + str(warmup_iterations),
                                      '--iterations=' + str(iterations)]
    mx.log(' '.join(jsruncmd))

    passing = []

    # Capture stdout/stderr while still echoing them to the mx log.
    stdoutdata = []
    def stdout_collector(x):
        stdoutdata.append(x)
        mx.log(x.rstrip())
    stderrdata = []
    def stderr_collector(x):
        stderrdata.append(x)
        mx.warn(x.rstrip())

    returncode = mx.run(jsruncmd, cwd=bench_location, out=stdout_collector, err=stderr_collector, nonZeroIsFatal=False, timeout=timeout)

    if returncode == mx.ERROR_TIMEOUT:
        print('INFO: TIMEOUT (> %d): %s' % (timeout, name))
    elif returncode >= 0:
        # Count score lines of the form "<benchmark>: <number>"; any match
        # means the run produced results.
        matches = 0
        for line in stdoutdata:
            if re.match(r'^\S+: *\d+(\.\d+)?\s*$', line):
                matches += 1
        if matches > 0:
            passing = stdoutdata

    if not passing:
        mx.abort('JS benchmark ' + name + ' failed')
def c1visualizer(args):
    """run the C1 Compiler Visualizer"""
    # Fix: docstring said "Cl" instead of "C1".
    libpath = join(_suite.dir, 'lib')
    if mx.get_os() == 'windows':
        executable = join(libpath, 'c1visualizer', 'bin', 'c1visualizer.exe')
    else:
        executable = join(libpath, 'c1visualizer', 'bin', 'c1visualizer')

    # Check whether the current C1Visualizer installation is the up-to-date
    if exists(executable) and not exists(mx.library('C1VISUALIZER_DIST').get_path(resolve=False)):
        mx.log('Updating C1Visualizer')
        shutil.rmtree(join(libpath, 'c1visualizer'))

    archive = mx.library('C1VISUALIZER_DIST').get_path(resolve=True)

    if not exists(executable):
        zf = zipfile.ZipFile(archive, 'r')
        zf.extractall(libpath)

    if not exists(executable):
        mx.abort('C1Visualizer binary does not exist: ' + executable)

    if mx.get_os() != 'windows':
        # Make sure that execution is allowed. The zip file does not always specify that correctly
        # (0o777 is the Python-3-compatible spelling of the old 0777 literal).
        os.chmod(executable, 0o777)

    mx.run([executable])
def igv(args):
    """run the Ideal Graph Visualizer"""
    logFile = '.ideal_graph_visualizer.log'
    with open(join(_suite.dir, logFile), 'w') as fp:
        mx.logv('[Ideal Graph Visualizer log is in ' + fp.name + ']')
        nbplatform = join(_suite.dir, 'src', 'share', 'tools', 'IdealGraphVisualizer', 'nbplatform')

        # Remove NetBeans platform if it is earlier than the current supported version
        if exists(nbplatform):
            updateTrackingFile = join(nbplatform, 'platform', 'update_tracking', 'org-netbeans-core.xml')
            if not exists(updateTrackingFile):
                # No tracking file means an unknown/legacy platform — rebuild it.
                mx.log('Could not find \'' + updateTrackingFile + '\', removing NetBeans platform')
                shutil.rmtree(nbplatform)
            else:
                # Compare the installed platform version against the supported one.
                dom = xml.dom.minidom.parse(updateTrackingFile)
                currentVersion = mx.VersionSpec(dom.getElementsByTagName('module_version')[0].getAttribute('specification_version'))
                supportedVersion = mx.VersionSpec('3.43.1')
                if currentVersion < supportedVersion:
                    mx.log('Replacing NetBeans platform version ' + str(currentVersion) + ' with version ' + str(supportedVersion))
                    shutil.rmtree(nbplatform)
                elif supportedVersion < currentVersion:
                    # Newer than expected: keep it but remind maintainers to bump the constant.
                    mx.log('Supported NetBeans version in igv command should be updated to ' + str(currentVersion))

        if not exists(nbplatform):
            mx.logv('[This execution may take a while as the NetBeans platform needs to be downloaded]')

        env = _igvBuildEnv()
        # make the jar for Batik 1.7 available.
        env['IGV_BATIK_JAR'] = mx.library('BATIK').get_path(True)
        if mx.run(['ant', '-f', mx._cygpathU2W(join(_suite.dir, 'src', 'share', 'tools', 'IdealGraphVisualizer', 'build.xml')), '-l', mx._cygpathU2W(fp.name), 'run'], env=env, nonZeroIsFatal=False):
            mx.abort("IGV ant build & launch failed. Check '" + logFile + "'. You can also try to delete 'src/share/tools/IdealGraphVisualizer/nbplatform'.")
def _run_netbeans_app(app_name, env=None, args=None):
    """Extract (if needed) and launch the NetBeans-based application `app_name`.

    The application's archive is the mx library named '<APP_NAME>_DIST'; it is
    unpacked into the suite output root and refreshed whenever the library
    archive is newer than the extracted executable.
    """
    args = [] if args is None else args
    dist = app_name.upper() + '_DIST'
    name = app_name.lower()
    extractPath = join(_suite.get_output_root())
    if mx.get_os() == 'windows':
        executable = join(extractPath, name, 'bin', name + '.exe')
    else:
        executable = join(extractPath, name, 'bin', name)

    # Check whether the current installation is up-to-date
    if exists(executable) and not exists(mx.library(dist).get_path(resolve=False)):
        mx.log('Updating ' + app_name)
        shutil.rmtree(join(extractPath, name))

    archive = mx.library(dist).get_path(resolve=True)

    if not exists(executable):
        zf = zipfile.ZipFile(archive, 'r')
        zf.extractall(extractPath)

    if not exists(executable):
        mx.abort(app_name + ' binary does not exist: ' + executable)

    if mx.get_os() != 'windows':
        # Make sure that execution is allowed. The zip file does not always specify that correctly
        # (0o777 is the Python-3-compatible spelling of the old 0777 literal).
        os.chmod(executable, 0o777)

    mx.run([executable] + args, env=env)
def run_java(self, args, out=None, err=None, cwd=None, nonZeroIsFatal=False):
    """Run `args` on the JVMCI JDK this VM requires, forwarding the caller's
    output sinks and fatality setting.

    :return: whatever the underlying JDK's run_java returns
    """
    tag = mx.get_jdk_option().tag
    if tag and tag != mx_graal_core._JVMCI_JDK_TAG:
        mx.abort("The '{0}/{1}' VM requires '--jdk={2}'".format(
            self.name(), self.config_name(), mx_graal_core._JVMCI_JDK_TAG))
    # Fix: forward err and nonZeroIsFatal — the original passed err=out
    # (merging stderr into the stdout sink) and hard-coded
    # nonZeroIsFatal=False, silently ignoring the caller's arguments.
    # Also return the result so callers can observe the exit code.
    return mx.get_jdk(tag=mx_graal_core._JVMCI_JDK_TAG).run_java(
        args, out=out, err=err, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal)
def jmhJAR(self):
    """Return the validated, user-expanded path of the JMH jar configured
    via the --jmh-jar benchmark suite argument; aborts when unset or when
    the file does not exist."""
    if self.jmh_jar is None:
        mx.abort("Please use the --jmh-jar benchmark suite argument to set the JMH jar file.")
    expanded_path = os.path.expanduser(self.jmh_jar)
    if not os.path.exists(expanded_path):
        mx.abort("The --jmh-jar argument points to a non-existing file: " + expanded_path)
    return expanded_path
def ensureDragonEggExists():
    """downloads dragonegg if not downloaded yet"""
    if os.path.exists(dragonEggPath()):
        return
    # A user-specified DRAGONEGG location that does not exist is an error;
    # only fall back to downloading when no location was given.
    if 'DRAGONEGG' in os.environ:
        mx.abort('dragonegg not found at ' + os.environ['DRAGONEGG'])
    pullInstallDragonEgg()
def _junit_r_harness(args, vmArgs, junitArgs):
    # Variant of the FastR JUnit harness that suppresses Truffle compilation
    # and adds JVMCI-specific VM options; builds the custom RunListener
    # argument string and launches the tests via mx.run_java.

    # always pass the directory where the expected output file should reside
    runlistener_arg = 'expected=' + _test_srcdir()

    # there should not be any unparsed arguments at this stage
    if args.remainder:
        mx.abort('unexpected arguments: ' + str(args.remainder).strip('[]') + '; did you forget --tests')

    def add_arg_separator():
        # can't update in Python 2.7: closures cannot rebind the outer
        # variable, so return the (possibly comma-terminated) value and let
        # the caller reassign runlistener_arg.
        arg = runlistener_arg
        if len(arg) > 0:
            arg += ','
        return arg

    if args.gen_fastr_output:
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'gen-fastr=' + args.gen_fastr_output

    if args.check_expected_output:
        # checking implies generating the expected output first
        args.gen_expected_output = True
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'check-expected'

    if args.gen_expected_output:
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'gen-expected'

    if args.keep_trailing_whitespace:
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'keep-trailing-whitespace'

    if args.gen_expected_quiet:
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'gen-expected-quiet'

    if args.gen_diff_output:
        runlistener_arg = add_arg_separator()
        runlistener_arg += 'gen-diff=' + args.gen_diff_output

#    if args.test_methods:
#        runlistener_arg = add_arg_separator()
#        runlistener_arg = 'test-methods=' + args.test_methods

    # use a custom junit.RunListener
    runlistener = 'com.oracle.truffle.r.test.TestBase$RunListener'
    if len(runlistener_arg) > 0:
        runlistener += ':' + runlistener_arg

    junitArgs += ['--runlistener', runlistener]

    # suppress Truffle compilation by using a high threshold
    vmArgs += ['-Dgraal.TruffleCompilationThreshold=100000']

    # on some systems a large Java stack seems necessary
    vmArgs += ['-Xss12m']

    if _mx_jvmci:
        vmArgs += ['-Dgraal.InliningDepthError=500', '-Dgraal.EscapeAnalysisIterations=3', '-XX:JVMCINMethodSizeLimit=1000000', '-Xmx5G']

    setREnvironment()
    jdk = get_default_jdk()
    return mx.run_java(vmArgs + junitArgs, nonZeroIsFatal=False, jdk=jdk)
def expect(line, prefix, optional=False):
    """Return the text following `prefix` in `line`, stripped of surrounding
    whitespace. When `prefix` is absent, return None if `optional` is set,
    otherwise abort."""
    idx = line.find(prefix)
    if idx >= 0:
        return line[idx + len(prefix):].strip()
    if optional:
        return None
    mx.abort('expecting: ' + prefix + ' in ' + line)
def getGPP():
    """tries to locate a g++ version suitable to execute Dragonegg"""
    specifiedCPP = getCommand('SULONG_GPP')
    if specifiedCPP is not None:
        return specifiedCPP
    # Call getDefaultGPP() only once: the original invoked it twice (once to
    # test for None, once to return), duplicating the lookup work.
    defaultGPP = getDefaultGPP()
    if defaultGPP is not None:
        return defaultGPP
    mx.abort('Could not find a compatible GCC version to execute Dragonegg! Please install g++-4.6 or another compatible version and specify it in the env file')
def getGFortran():
    """tries to locate a gfortran version suitable to execute Dragonegg"""
    specifiedGFortran = getCommand('SULONG_GFORTRAN')
    if specifiedGFortran is not None:
        return specifiedGFortran
    # Call getDefaultGFortran() only once: the original invoked it twice
    # (once to test for None, once to return), duplicating the lookup work.
    defaultGFortran = getDefaultGFortran()
    if defaultGFortran is not None:
        return defaultGFortran
    mx.abort('Could not find a compatible GFortran version to execute Dragonegg! Please install gfortran-4.6 or another compatible version and specify it in the env file')
def ctw(args, extraVMarguments=None):
    """run CompileTheWorld"""

    defaultCtwopts = 'Inline=false'

    parser = ArgumentParser(prog='mx ctw')
    parser.add_argument('--ctwopts', action='store', help='space separated JVMCI options used for CTW compilations (default: --ctwopts="' + defaultCtwopts + '")', default=defaultCtwopts, metavar='<options>')
    parser.add_argument('--cp', '--jar', action='store', help='jar or class path denoting classes to compile', metavar='<path>')
    if not isJDK8:
        parser.add_argument('--limitmods', action='store', help='limits the set of compiled classes to only those in the listed modules', metavar='<modulename>[,<modulename>...]')

    args, vmargs = parser.parse_known_args(args)

    if args.ctwopts:
        # Replace spaces with '#' since it cannot contain spaces
        vmargs.append('-Dgraal.CompileTheWorldConfig=' + re.sub(r'\s+', '#', args.ctwopts))

    # suppress menubar and dock when running on Mac; exclude x11 classes as they may cause VM crashes (on Solaris)
    vmargs = ['-Djava.awt.headless=true'] + vmargs

    if args.cp:
        cp = os.path.abspath(args.cp)
        if not isJDK8 and not _is_jvmci_enabled(vmargs):
            mx.abort('Non-Graal CTW does not support specifying a specific class path or jar to compile')
    else:
        # Default class path: JDK 8 compiles rt.jar, JDK 9+ the JRT image.
        if isJDK8:
            cp = join(jdk.home, 'jre', 'lib', 'rt.jar')
        else:
            # Compile all classes in the JRT image by default.
            cp = join(jdk.home, 'lib', 'modules')
        vmargs.append('-Dgraal.CompileTheWorldExcludeMethodFilter=sun.awt.X11.*.*')

    # Bootstrap the JVMCI compiler first when it is the active compiler.
    if _get_XX_option_value(vmargs + _noneAsEmptyList(extraVMarguments), 'UseJVMCICompiler', False):
        vmargs.append('-XX:+BootstrapJVMCI')

    if isJDK8:
        if not _is_jvmci_enabled(vmargs):
            vmargs.extend(['-XX:+CompileTheWorld', '-Xbootclasspath/p:' + cp])
        else:
            vmargs.extend(['-Dgraal.CompileTheWorldClasspath=' + cp, '-XX:-UseJVMCIClassLoader', 'com.oracle.graal.hotspot.CompileTheWorld'])
    else:
        if _is_jvmci_enabled(vmargs):
            # To be able to load all classes in the JRT with Class.forName,
            # all JDK modules need to be made root modules.
            limitmods = frozenset(args.limitmods.split(',')) if args.limitmods else None
            nonBootJDKModules = [m.name for m in jdk.get_modules() if not m.boot and (limitmods is None or m.name in limitmods)]
            if nonBootJDKModules:
                vmargs.append('-addmods')
                vmargs.append(','.join(nonBootJDKModules))
            if args.limitmods:
                vmargs.append('-DCompileTheWorld.limitmods=' + args.limitmods)
            vmargs.extend(['-Dgraal.CompileTheWorldClasspath=' + cp, 'com.oracle.graal.hotspot.CompileTheWorld'])
        else:
            vmargs.append('-XX:+CompileTheWorld')

    run_vm(vmargs + _noneAsEmptyList(extraVMarguments))
def extractTarball(file, target_dir):
    """Unpack `file` (a tar, jar or zip archive, chosen by filename suffix)
    into `target_dir`; aborts for any other suffix."""
    if file.endswith('tar'):
        with tarfile.open(file, 'r:') as archive:
            archive.extractall(target_dir)
    elif file.endswith(('jar', 'zip')):
        with zipfile.ZipFile(file, "r") as archive:
            archive.extractall(target_dir)
    else:
        mx.abort('Unsupported compressed file ' + file)
def createCommandLineArgs(self, benchmarks, bmSuiteArgs):
    """Assemble the SPECjbb command line; this suite does not support
    selecting individual benchmarks."""
    if benchmarks is not None:
        mx.abort("No benchmark should be specified for the selected suite.")
    vm_args = self.vmArgs(bmSuiteArgs)
    run_args = self.runArgs(bmSuiteArgs)
    # Layout: <vm args> -cp <jbb jar> <main class> -propfile SPECjbb.props <run args>
    return (vm_args
            + ["-cp", self.specJbbClassPath(), "spec.jbb.JBBmain"]
            + ["-propfile", "SPECjbb.props"]
            + run_args)
def specJbbClassPath(self):
    """Return the path of specjbb2015.jar inside the directory named by the
    SPECJBB2015 environment variable; aborts when the variable is unset or
    the jar is missing."""
    root = mx.get_env("SPECJBB2015")
    if root is None:
        mx.abort("Please set the SPECJBB2015 environment variable to a " +
                 "SPECjbb2015 directory.")
    jbbpath = join(root, "specjbb2015.jar")
    if not exists(jbbpath):
        mx.abort("The SPECJBB2015 environment variable points to a directory " +
                 "without the specjbb2015.jar file.")
    return jbbpath
def mx_register_dynamic_suite_constituents(register_project, register_distribution):
    """
    Registers the dynamically-created polybench benchmark projects and
    distributions for the GraalVM components present in this build.

    :type register_project: (mx.Project) -> None
    :type register_distribution: (mx.Distribution) -> None
    """
    if mx_sdk_vm_impl.has_component('FastR'):
        # FastR may only be bundled from an explicit release build.
        fastr_release_env = mx.get_env('FASTR_RELEASE', None)
        if fastr_release_env != 'true':
            mx.abort((
                'When including FastR, please set FASTR_RELEASE to \'true\' (env FASTR_RELEASE=true mx ...). Got FASTR_RELEASE={}. '
                'For local development, you may also want to disable recommended packages build (FASTR_NO_RECOMMENDED=true) and '
                'capturing of system libraries (export FASTR_CAPTURE_DEPENDENCIES set to an empty value). '
                'See building.md in FastR documentation for more details.'
            ).format(fastr_release_env))
    if register_project:
        register_project(GraalVmSymlinks())

        benchmark_dist = _suite.dependency("POLYBENCH_BENCHMARKS")

        def _add_project_to_dist(destination, name, source='dependency:{name}/*'):
            # Appends `name`'s artifacts to the benchmark distribution layout
            # under `destination` and records the build dependency.
            if destination not in benchmark_dist.layout:
                benchmark_dist.layout[destination] = []
            benchmark_dist.layout[destination].append(source.format(name=name))
            benchmark_dist.buildDependencies.append(name)

        if mx_sdk_vm_impl.has_component('GraalWasm'):
            # Wasm interpreter benchmarks (compiled from .wat sources).
            import mx_wasm

            class GraalVmWatProject(mx_wasm.WatProject):
                def getSourceDir(self):
                    return self.subDir

                def isBenchmarkProject(self):
                    return self.name.startswith("benchmarks.")

            register_project(GraalVmWatProject(
                suite=_suite,
                name='benchmarks.interpreter.wasm',
                deps=[],
                workingSets=None,
                subDir=join(_suite.dir, 'benchmarks', 'interpreter'),
                theLicense=None,
                testProject=True,
                defaultBuild=False,
            ))
            # add wasm to the layout of the benchmark distribution
            _add_project_to_dist('./interpreter/', 'benchmarks.interpreter.wasm')

        if mx_sdk_vm_impl.has_component('LLVM Runtime Native'):
            # LLVM interpreter benchmarks built with the bootstrap toolchain.
            register_project(mx.NativeProject(
                suite=_suite,
                name='benchmarks.interpreter.llvm.native',
                results=['interpreter/'],
                buildEnv={
                    'NATIVE_LLVM_CC': '<toolchainGetToolPath:native,CC>',
                },
                buildDependencies=[
                    'sulong:SULONG_BOOTSTRAP_TOOLCHAIN',
                ],
                vpath=True,
                deps=[],
                workingSets=None,
                d=join(_suite.dir, 'benchmarks', 'interpreter'),
                subDir=None,
                srcDirs=[''],
                output=None,
                theLicense=None,
                testProject=True,
                defaultBuild=False,
            ))
            # add bitcode to the layout of the benchmark distribution
            _add_project_to_dist('./', 'benchmarks.interpreter.llvm.native')

        if mx_sdk_vm_impl.has_component('Java on Truffle'):
            # One Java project + jar distribution per benchmark directory
            # (each directory is named after its fully-qualified main class).
            java_benchmarks = join(_suite.dir, 'benchmarks', 'interpreter', 'java')
            for f in os.listdir(java_benchmarks):
                if isdir(join(java_benchmarks, f)) and not f.startswith("."):
                    main_class = basename(f)
                    simple_name = main_class.split(".")[-1]

                    project_name = 'benchmarks.interpreter.espresso.' + simple_name.lower()
                    register_project(mx.JavaProject(
                        suite=_suite,
                        subDir=None,
                        srcDirs=[join(_suite.dir, 'benchmarks', 'interpreter', 'java', main_class)],
                        deps=[],
                        name=project_name,
                        d=join(_suite.dir, 'benchmarks', 'interpreter', 'java', main_class),
                        javaCompliance='1.8+',
                        checkstyleProj=project_name,
                        workingSets=None,
                        theLicense=None,
                        testProject=True,
                        defaultBuild=False,
                    ))

                    dist_name = 'POLYBENCH_ESPRESSO_' + simple_name.upper()
                    register_distribution(mx_jardistribution.JARDistribution(
                        suite=_suite,
                        subDir=None,
                        srcDirs=[''],
                        sourcesPath=[],
                        deps=[project_name],
                        mainClass=main_class,
                        name=dist_name,
                        path=simple_name + '.jar',
                        platformDependent=False,
                        distDependencies=[],
                        javaCompliance='1.8+',
                        excludedLibs=[],
                        workingSets=None,
                        theLicense=None,
                        testProject=True,
                        defaultBuild=False,
                    ))
                    # add jars to the layout of the benchmark distribution
                    _add_project_to_dist('./interpreter/{}.jar'.format(simple_name), dist_name,
                                         source='dependency:{name}/polybench-espresso-' + simple_name.lower() + '.jar')
def image(args):
    """build a boot image

    Run the BootImageGenerator to build a Maxine boot image. The classes
    and packages specified on the command line will be included in the
    boot image in addition to those found by the Package.java mechanism.
    Package names are differentiated from class names by being prefixed
    with '^'.

    The platform configuration for the generated image is auto-detected
    by native methods. However, the following system properties can be
    used to override the defaults:

        Name            | Description                   | Example values
        ================+===============================+================
        max.platform    | name of a preset platform     | solaris-amd64 linux-amd64 darwin-amd64 linux-aarch64
        max.cpu         | processor model               | AMD64 IA32 SPARCV9 ARMV7 Aarch64
        max.isa         | instruction set architecture  | AMD64 ARM PPC SPARC Aarch64
        max.os          | operating system              | Darwin Linux Solaris
        max.endianness  | endianness                    | BIG LITTLE
        max.bits        | machine word size             | 64 32
        max.page        | page size                     | 4096 8192
        max.nsig        | number of signals             | 32
        mas.idiv        | has hw integer divider        | 1/0

    These system properties can be specified as options to the image
    command (e.g. '-os Darwin -bits 32').

    An option starting with '@' denotes one of the preconfigured set of
    options described by running "mx options".

    An option starting with '--' is interpreted as a VM option of the same name
    after the leading '-' is removed. For example, to use the '-verbose:class'
    VM option to trace class loading while image building, specify '--verbose:class'.
    Note that not all VM options have an effect during image building.

    Use "mx image -help" to see what other options this command accepts."""

    systemProps = ['-Xmx1G']  # system properties / VM args for the generator JVM
    imageArgs = []            # args passed through to BootImageGenerator itself
    i = 0
    # NOTE: args is mutated in place while being index-walked; '@config' entries
    # are expanded into their constituent options and re-scanned (no i += 1).
    while i < len(args):
        arg = args[i]
        if arg[0] == '@':
            # Named preconfigured option set: splice its values into args and
            # reprocess from the same index.
            name = arg.lstrip('@')
            configs = _configs()
            if not name in configs:
                mx.log()
                mx.abort('Invalid image configuration: ' + name)
            if "graal" in name:
                # Graal configs get assertions enabled in the generator JVM.
                systemProps += ['-ea', '-esa']
            values = configs[name].split('@')
            del args[i]
            args[i:i] = values
            continue
        elif arg in ['-platform', '-cpu', '-isa', '-os', '-endianness', '-bits', '-page', '-nsig', '-idiv']:
            # Platform override: consumes the following argument as the value
            # and forwards it as a -Dmax.<name> system property.
            name = arg.lstrip('-')
            i += 1
            if i == len(args):
                mx.abort('Missing value for ' + arg)
            value = args[i]
            systemProps += ['-Dmax.' + name + '=' + value]
        elif arg.startswith('--XX:LogFile='):
            # The following --XX options are communicated to the generator via
            # environment variables rather than JVM flags.
            os.environ['MAXINE_LOG_FILE'] = arg.split('=', 1)[1]
        elif arg.startswith('--XX:+PrintCFGToFile'):
            os.environ['PRINT_CFG'] = '1'
        elif arg.startswith('--XX:+EnableBootImageDebugMethodID'):
            os.environ['ENABLE_DEBUG_METHODS_ID'] = '1'
        elif arg.startswith('--XX:+PrintHIR'):
            os.environ['PRINT_HIR'] = '1'
        elif arg.startswith('--XX:PrintFilter='):
            os.environ['PRINT_FILTER'] = arg.split('=', 1)[1]
        elif arg == '-vma':
            systemProps += ['-Dmax.permsize=2']
        else:
            # Anything unrecognized is passed straight to BootImageGenerator.
            imageArgs += [arg]
        i += 1

    mx.run_java(['-Xbootclasspath/a:' + mx.distribution('GRAAL').path] + systemProps +
                ['-cp', suite_classpath(), 'com.sun.max.vm.hosted.BootImageGenerator',
                 '-trace=1', '-run=java'] + imageArgs)
def bench(args):
    """run benchmarks and parse their output for results

    Results are JSON formatted : {group : {benchmark : score}}.

    Recognized arguments (consumed from `args`, which is mutated in place):
      -resultfile <file>     write the JSON results to <file>
      -resultfilecsv <file>  write the results as CSV to <file>
      all | dacapo | scaladacapo | bootstrap | specjvm2008 | specjbb2005 |
      specjbb2013 | ctw-full | ctw-noinline
                             benchmark groups to run (default: all)
      <group>:<name>         a single benchmark from a group
      -<anything else>       forwarded to the VM as an extra option
    """
    def _pop_file_arg(flag):
        # Remove '<flag> <file>' from args and return <file>; None if absent.
        # Aborts if the flag is present but not followed by a file name.
        if flag not in args:
            return None
        index = args.index(flag)
        if index + 1 >= len(args):
            mx.abort(flag + ' must be followed by a file name')
        value = args[index + 1]
        del args[index]
        del args[index]
        return value

    resultFile = _pop_file_arg('-resultfile')
    resultFileCSV = _pop_file_arg('-resultfilecsv')

    vm = mx_graal.get_vm()
    # BUG FIX: original used `len(args) is 0` — identity comparison on an int,
    # which only works by CPython's small-int caching accident.
    if not args:
        args = ['all']

    # Leading-dash arguments are forwarded to the VM.
    vmArgs = [arg for arg in args if arg.startswith('-')]

    def benchmarks_in_group(group):
        # Return the names given as '<group>:<name>' arguments.
        prefix = group + ':'
        return [a[len(prefix):] for a in args if a.startswith(prefix)]

    results = {}
    benchmarks = []
    # DaCapo
    if 'dacapo' in args or 'all' in args:
        benchmarks += sanitycheck.getDacapos(level=sanitycheck.SanityCheckLevel.Benchmark)
    else:
        dacapos = benchmarks_in_group('dacapo')
        for dacapo in dacapos:
            if dacapo not in sanitycheck.dacapoSanityWarmup:
                mx.abort('Unknown DaCapo : ' + dacapo)
            iterations = sanitycheck.dacapoSanityWarmup[dacapo][sanitycheck.SanityCheckLevel.Benchmark]
            if iterations > 0:
                benchmarks += [sanitycheck.getDacapo(dacapo, ['-n', str(iterations)])]

    if 'scaladacapo' in args or 'all' in args:
        benchmarks += sanitycheck.getScalaDacapos(level=sanitycheck.SanityCheckLevel.Benchmark)
    else:
        scaladacapos = benchmarks_in_group('scaladacapo')
        for scaladacapo in scaladacapos:
            if scaladacapo not in sanitycheck.dacapoScalaSanityWarmup:
                mx.abort('Unknown Scala DaCapo : ' + scaladacapo)
            iterations = sanitycheck.dacapoScalaSanityWarmup[scaladacapo][sanitycheck.SanityCheckLevel.Benchmark]
            if iterations > 0:
                benchmarks += [sanitycheck.getScalaDacapo(scaladacapo, ['-n', str(iterations)])]

    # Bootstrap
    if 'bootstrap' in args or 'all' in args:
        benchmarks += sanitycheck.getBootstraps()

    # SPECjvm2008
    if 'specjvm2008' in args or 'all' in args:
        benchmarks += [sanitycheck.getSPECjvm2008(['-ikv', '-wt', '120', '-it', '120'])]
    else:
        specjvms = benchmarks_in_group('specjvm2008')
        for specjvm in specjvms:
            benchmarks += [sanitycheck.getSPECjvm2008(['-ikv', '-wt', '120', '-it', '120', specjvm])]

    if 'specjbb2005' in args or 'all' in args:
        benchmarks += [sanitycheck.getSPECjbb2005()]

    if 'specjbb2013' in args:  # or 'all' in args //currently not in default set
        benchmarks += [sanitycheck.getSPECjbb2013()]

    if 'ctw-full' in args:
        benchmarks.append(sanitycheck.getCTW(vm, sanitycheck.CTWMode.Full))
    if 'ctw-noinline' in args:
        benchmarks.append(sanitycheck.getCTW(vm, sanitycheck.CTWMode.NoInline))

    # Allow extensions registered elsewhere to contribute benchmarks.
    for f in extraBenchmarks:
        f(args, vm, benchmarks)

    for test in benchmarks:
        for (groupName, res) in test.bench(vm, extraVmOpts=vmArgs).items():
            group = results.setdefault(groupName, {})
            group.update(res)
    mx.log(json.dumps(results))
    if resultFile:
        with open(resultFile, 'w') as f:
            f.write(json.dumps(results))
    if resultFileCSV:
        with open(resultFileCSV, 'w') as f:
            for key1, value1 in results.iteritems():
                f.write('%s;\n' % (str(key1)))
                for key2, value2 in sorted(value1.iteritems()):
                    f.write('%s; %s;\n' % (str(key2), str(value2)))
def _intellij_suite(args, s, declared_modules, referenced_modules, sdks, refreshOnly=False, mx_python_modules=False,
                    generate_external_projects=True, java_modules=True, module_files_only=False, generate_native_projects=False):
    """Generate the IntelliJ IDEA project configuration for suite `s`.

    Writes .iml module files for every mx project in the suite plus the .idea
    project-level files (modules.xml, compiler.xml, misc.xml, libraries,
    checkstyle, code style, run configuration, ant integration, vcs.xml).

    :param s: the mx suite to generate configuration for
    :param declared_modules: set, filled with the names of modules declared in modules.xml
    :param referenced_modules: set, filled with the names of modules referenced as dependencies
    :param sdks: SDK registry consumed by the intellij_get_*_sdk_name helpers
    :param refreshOnly: unused in this function (kept for interface compatibility)
    :param module_files_only: if True, only .iml files are written, no .idea project files
    """
    libraries = set()        # library dependencies encountered; .xml written at the end
    jdk_libraries = set()    # JdkLibrary dependencies encountered; resolved per-JDK at the end

    project_dir = s.dir
    ideaProjectDirectory = join(project_dir, '.idea')

    modulesXml = mx.XMLDoc()
    if not module_files_only and not s.isBinarySuite():
        mx.ensure_dir_exists(ideaProjectDirectory)
        nameFile = join(ideaProjectDirectory, '.name')
        mx.update_file(nameFile, s.name)
        modulesXml.open('project', attributes={'version': '4'})
        modulesXml.open('component', attributes={'name': 'ProjectModuleManager'})
        modulesXml.open('modules')

    def _intellij_exclude_if_exists(xml, p, name, output=False):
        # Emit an excludeFolder entry for `name` under project `p`, but only
        # if the folder actually exists on disk.
        root = p.get_output_root() if output else p.dir
        path = join(root, name)
        if exists(path):
            excludeRoot = p.get_output_root() if output else '$MODULE_DIR$'
            excludePath = join(excludeRoot, name)
            xml.element('excludeFolder', attributes={'url':'file://' + excludePath})

    annotationProcessorProfiles = {}

    def _complianceToIntellijLanguageLevel(compliance):
        # they changed the name format starting with JDK_10
        if compliance.value >= 10:
            # Lastest Idea 2018.2 only understands JDK_11 so clamp at that value
            return 'JDK_' + str(min(compliance.value, 11))
        return 'JDK_1_' + str(compliance.value)

    def _intellij_external_project(externalProjects, sdks, host):
        # Generate .iml files for 'externalProjects' declared on a project or
        # suite (ruby/python/web modules living outside the mx project tree).
        if externalProjects:
            for project_name, project_definition in externalProjects.items():
                if not project_definition.get('path', None):
                    mx.abort("external project {} is missing path attribute".format(project_name))
                if not project_definition.get('type', None):
                    mx.abort("external project {} is missing type attribute".format(project_name))

                supported = ['path', 'type', 'source', 'test', 'excluded', 'load_path']
                unknown = set(project_definition.keys()) - frozenset(supported)
                if unknown:
                    mx.abort("There are unsupported {} keys in {} external project".format(unknown, project_name))

                path = os.path.realpath(join(host.dir, project_definition["path"]))
                module_type = project_definition["type"]

                moduleXml = mx.XMLDoc()
                moduleXml.open('module',
                               attributes={'type': {'ruby': 'RUBY_MODULE',
                                                    'python': 'PYTHON_MODULE',
                                                    'web': 'WEB_MODULE'}.get(module_type, 'UKNOWN_MODULE'),
                                           'version': '4'})
                moduleXml.open('component', attributes={'name': 'NewModuleRootManager', 'inherit-compiler-output': 'true'})
                moduleXml.element('exclude-output')

                moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$'})
                for name in project_definition.get('source', []):
                    moduleXml.element('sourceFolder', attributes={'url':'file://$MODULE_DIR$/' + name, 'isTestSource': str(False)})
                for name in project_definition.get('test', []):
                    moduleXml.element('sourceFolder', attributes={'url':'file://$MODULE_DIR$/' + name, 'isTestSource': str(True)})
                for name in project_definition.get('excluded', []):
                    # a throwaway object with just a 'dir' attribute stands in for a project
                    _intellij_exclude_if_exists(moduleXml, type('', (object,), {"dir": path})(), name)
                moduleXml.close('content')

                if module_type == "ruby":
                    moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_ruby_sdk_type, 'jdkName': intellij_get_ruby_sdk_name(sdks)})
                elif module_type == "python":
                    moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_python_sdk_type, 'jdkName': intellij_get_python_sdk_name(sdks)})
                elif module_type == "web":
                    # nothing to do
                    pass
                else:
                    mx.abort("External project type {} not supported".format(module_type))

                moduleXml.element('orderEntry', attributes={'type': 'sourceFolder', 'forTests': 'false'})
                moduleXml.close('component')

                load_paths = project_definition.get('load_path', [])
                if load_paths:
                    if not module_type == "ruby":
                        mx.abort("load_path is supported only for ruby type external project")
                    moduleXml.open('component', attributes={'name': 'RModuleSettingsStorage'})
                    load_paths_attributes = {}
                    load_paths_attributes['number'] = str(len(load_paths))
                    for i, name in enumerate(load_paths):
                        load_paths_attributes["string" + str(i)] = "$MODULE_DIR$/" + name
                    moduleXml.element('LOAD_PATH', load_paths_attributes)
                    moduleXml.close('component')

                moduleXml.close('module')
                moduleFile = join(path, project_name + '.iml')
                mx.update_file(moduleFile, moduleXml.xml(indent='  ', newl='\n'))

                if not module_files_only:
                    declared_modules.add(project_name)
                    moduleFilePath = "$PROJECT_DIR$/" + os.path.relpath(moduleFile, s.dir)
                    modulesXml.element('module', attributes={'fileurl': 'file://' + moduleFilePath, 'filepath': moduleFilePath})

    if generate_external_projects:
        for p in s.projects_recursive() + mx._mx_suite.projects_recursive():
            _intellij_external_project(getattr(p, 'externalProjects', None), sdks, p)

    max_checkstyle_version = None
    compilerXml = None

    if java_modules:
        if not module_files_only:
            compilerXml = mx.XMLDoc()
            compilerXml.open('project', attributes={'version': '4'})

        # The IntelliJ parser seems to mishandle empty ADDITIONAL_OPTIONS_OVERRIDE elements
        # so only emit the section if there will be something in it.
        additionalOptionsOverrides = False
        assert not s.isBinarySuite()
        # create the modules (1 IntelliJ module = 1 mx project/distribution)
        for p in s.projects_recursive() + mx._mx_suite.projects_recursive():
            if not p.isJavaProject():
                continue

            jdk = mx.get_jdk(p.javaCompliance)
            assert jdk

            # Value of the $MODULE_DIR$ IntelliJ variable and parent directory of the .iml file.
            module_dir = mx.ensure_dir_exists(p.dir)

            processors = p.annotation_processors()
            if processors:
                # group projects by (source gen dir, processor tuple) for compiler.xml profiles
                annotationProcessorProfiles.setdefault((p.source_gen_dir_name(),) + tuple(processors), []).append(p)

            intellijLanguageLevel = _complianceToIntellijLanguageLevel(p.javaCompliance)

            moduleXml = mx.XMLDoc()
            moduleXml.open('module', attributes={'type': 'JAVA_MODULE', 'version': '4'})

            moduleXml.open('component', attributes={'name': 'NewModuleRootManager', 'LANGUAGE_LEVEL': intellijLanguageLevel, 'inherit-compiler-output': 'false'})
            moduleXml.element('output', attributes={'url': 'file://$MODULE_DIR$/' + os.path.relpath(p.output_dir(), module_dir)})

            moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$'})
            for src in p.srcDirs:
                srcDir = mx.ensure_dir_exists(join(p.dir, src))
                moduleXml.element('sourceFolder', attributes={'url':'file://$MODULE_DIR$/' + os.path.relpath(srcDir, module_dir), 'isTestSource': str(p.is_test_project())})
            for name in ['.externalToolBuilders', '.settings', 'nbproject']:
                _intellij_exclude_if_exists(moduleXml, p, name)
            moduleXml.close('content')

            if processors:
                # separate content root for generated sources
                moduleXml.open('content', attributes={'url': 'file://' + p.get_output_root()})
                genDir = p.source_gen_dir()
                mx.ensure_dir_exists(genDir)
                moduleXml.element('sourceFolder', attributes={'url':'file://' + p.source_gen_dir(), 'isTestSource': str(p.is_test_project()), 'generated': 'true'})
                for name in [basename(p.output_dir())]:
                    _intellij_exclude_if_exists(moduleXml, p, name, output=True)
                moduleXml.close('content')

            moduleXml.element('orderEntry', attributes={'type': 'sourceFolder', 'forTests': 'false'})

            proj = p

            dependencies_project_packages = set()

            def should_process_dep(dep, edge):
                if dep.isTARDistribution() or dep.isNativeProject() or dep.isArchivableProject() or dep.isResourceLibrary():
                    mx.logv("Ignoring dependency from {} to {}".format(proj.name, dep.name))
                    return False
                return True

            def process_dep(dep, edge):
                # Translate each mx dependency into the appropriate IntelliJ orderEntry.
                if dep is proj:
                    return
                if dep.isLibrary() or dep.isJARDistribution() or dep.isMavenProject():
                    libraries.add(dep)
                    moduleXml.element('orderEntry', attributes={'type': 'library', 'name': dep.name, 'level': 'project'})
                elif dep.isJavaProject():
                    dependencies_project_packages.update(dep.defined_java_packages())
                    referenced_modules.add(dep.name)
                    moduleXml.element('orderEntry', attributes={'type': 'module', 'module-name': dep.name})
                elif dep.isJdkLibrary():
                    jdk_libraries.add(dep)
                    if jdk.javaCompliance < dep.jdkStandardizedSince:
                        moduleXml.element('orderEntry', attributes={'type': 'library', 'name': dep.name, 'level': 'project'})
                    else:
                        mx.logv("{} skipping {} for {}".format(p, dep, jdk)) #pylint: disable=undefined-loop-variable
                elif dep.isJreLibrary():
                    pass
                elif dep.isClasspathDependency():
                    moduleXml.element('orderEntry', attributes={'type': 'library', 'name': dep.name, 'level': 'project'})
                else:
                    mx.abort("Dependency not supported: {0} ({1})".format(dep, dep.__class__.__name__))

            p.walk_deps(preVisit=should_process_dep, visit=process_dep, ignoredEdges=[mx.DEP_EXCLUDED])

            moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_java_sdk_type, 'jdkName': intellij_get_java_sdk_name(sdks, jdk)})

            moduleXml.close('component')

            if compilerXml and jdk.javaCompliance >= '9':
                # On JDK 9+, concealed package imports need --add-exports /
                # --add-modules javac options recorded in compiler.xml.
                moduleDeps = p.get_concealed_imported_packages(jdk=jdk)
                if moduleDeps:
                    exports = sorted([(m, pkgs) for m, pkgs in moduleDeps.items() if dependencies_project_packages.isdisjoint(pkgs)])
                    if exports:
                        args = []
                        exported_modules = set()
                        for m, pkgs in exports:
                            args += ['--add-exports={}/{}=ALL-UNNAMED'.format(m, pkg) for pkg in pkgs]
                            exported_modules.add(m)
                        roots = set(jdk.get_root_modules())
                        observable_modules = jdk.get_modules()
                        default_module_graph = mx_javamodules.get_transitive_closure(roots, observable_modules)
                        module_graph = mx_javamodules.get_transitive_closure(roots | exported_modules, observable_modules)
                        extra_modules = module_graph - default_module_graph
                        if extra_modules:
                            args.append('--add-modules=' + ','.join((m.name for m in extra_modules)))
                        if not additionalOptionsOverrides:
                            # lazily open the section; closed after the project loop
                            additionalOptionsOverrides = True
                            compilerXml.open('component', {'name': 'JavacSettings'})
                            compilerXml.open('option', {'name': 'ADDITIONAL_OPTIONS_OVERRIDE'})
                        compilerXml.element('module', {'name': p.name, 'options': ' '.join(args)})

            # Checkstyle
            csConfig, checkstyleVersion, checkstyleProj = p.get_checkstyle_config()
            if csConfig:
                max_checkstyle_version = max(max_checkstyle_version, mx.VersionSpec(checkstyleVersion)) if max_checkstyle_version else mx.VersionSpec(checkstyleVersion)

                moduleXml.open('component', attributes={'name': 'CheckStyle-IDEA-Module'})
                moduleXml.open('option', attributes={'name': 'configuration'})
                moduleXml.open('map')
                moduleXml.element('entry', attributes={'key': "checkstyle-version", 'value': checkstyleVersion})
                moduleXml.element('entry', attributes={'key': "active-configuration", 'value': "PROJECT_RELATIVE:" + join(checkstyleProj.dir, ".checkstyle_checks.xml") + ":" + checkstyleProj.name})
                moduleXml.close('map')
                moduleXml.close('option')
                moduleXml.close('component')

            moduleXml.close('module')
            moduleFile = join(module_dir, p.name + '.iml')
            mx.update_file(moduleFile, moduleXml.xml(indent='  ', newl='\n').rstrip())

            if not module_files_only:
                declared_modules.add(p.name)
                moduleFilePath = "$PROJECT_DIR$/" + os.path.relpath(moduleFile, project_dir)
                modulesXml.element('module', attributes={'fileurl': 'file://' + moduleFilePath, 'filepath': moduleFilePath})
        if additionalOptionsOverrides:
            compilerXml.close('option')
            compilerXml.close('component')

    if mx_python_modules:
        def _python_module(suite):
            """
            Gets a tuple describing the IntelliJ module for the python sources of `suite`.
            The tuple consists of the module name, module directory and the name of the .iml in the module directory.
            """
            name = basename(suite.mxDir)
            module_dir = suite.mxDir
            return name, mx.ensure_dir_exists(module_dir), name + '.iml'

        def _add_declared_module(suite):
            # Register the suite's python module in modules.xml (unless .iml-only mode).
            if not module_files_only:
                name, module_dir, iml_file = _python_module(suite)
                declared_modules.add(name)
                moduleFilePath = "$PROJECT_DIR$/" + os.path.relpath(join(module_dir, iml_file), project_dir)
                modulesXml.element('module', attributes={'fileurl': 'file://' + moduleFilePath, 'filepath': moduleFilePath})

        # mx.<suite> python module:
        _, module_dir, iml_file = _python_module(s)
        moduleXml = mx.XMLDoc()
        moduleXml.open('module', attributes={'type': 'PYTHON_MODULE', 'version': '4'})
        moduleXml.open('component', attributes={'name': 'NewModuleRootManager', 'inherit-compiler-output': 'true'})
        moduleXml.element('exclude-output')

        if s.name == 'mx':
            # MX itself is special. Python sources are also in the parent folder.
            moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$/..'})
            moduleXml.element('sourceFolder', attributes={'url': 'file://$MODULE_DIR$/..', 'isTestSource': 'false'})
        else:
            moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$'})
            moduleXml.element('sourceFolder', attributes={'url': 'file://$MODULE_DIR$/' + os.path.relpath(s.mxDir, module_dir), 'isTestSource': 'false'})
        for d in os.listdir(s.mxDir):
            directory = join(s.mxDir, d)
            # exclude Java source trees inside the mx dir from the python module
            if isdir(directory) and mx.dir_contains_files_recursively(directory, r".*\.java"):
                moduleXml.element('excludeFolder', attributes={'url': 'file://$MODULE_DIR$/' + os.path.relpath(directory, module_dir)})
        moduleXml.close('content')

        moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_python_sdk_type, 'jdkName': intellij_get_python_sdk_name(sdks)})
        moduleXml.element('orderEntry', attributes={'type': 'sourceFolder', 'forTests': 'false'})

        processed_suites = {s.name}

        def _mx_projects_suite(visited_suite, suite_import):
            # Depth-first walk over imported suites; each contributes a module dependency.
            if suite_import.name in processed_suites:
                return
            processed_suites.add(suite_import.name)
            dep_suite = mx.suite(suite_import.name)
            dep_module_name, _, _ = _python_module(dep_suite)
            moduleXml.element('orderEntry', attributes={'type': 'module', 'module-name': dep_module_name})
            _add_declared_module(dep_suite)
            dep_suite.visit_imports(_mx_projects_suite)

        s.visit_imports(_mx_projects_suite)
        if s.name != 'mx':
            moduleXml.element('orderEntry', attributes={'type': 'module', 'module-name': 'mx.mx'})

        moduleXml.close('component')
        moduleXml.close('module')
        moduleFile = join(module_dir, iml_file)
        mx.update_file(moduleFile, moduleXml.xml(indent='  ', newl='\n'))

        _add_declared_module(s)
        _add_declared_module(mx._mx_suite)

    if generate_native_projects:
        _intellij_native_projects(s, module_files_only, declared_modules, modulesXml)

    if generate_external_projects:
        _intellij_external_project(s.suiteDict.get('externalProjects', None), sdks, s)

    if not module_files_only:
        modulesXml.close('modules')
        modulesXml.close('component')
        modulesXml.close('project')
        moduleXmlFile = join(ideaProjectDirectory, 'modules.xml')
        mx.update_file(moduleXmlFile, modulesXml.xml(indent='  ', newl='\n'))

    if java_modules and not module_files_only:
        unique_library_file_names = set()
        librariesDirectory = mx.ensure_dir_exists(join(ideaProjectDirectory, 'libraries'))

        mx.ensure_dir_exists(librariesDirectory)

        def make_library(name, path, source_path, suite_dir):
            # Write a project-level library .xml under .idea/libraries.
            # NOTE(review): the body reads the enclosing loop's `sourcePath`
            # variable, not the `source_path` parameter — the two are always
            # passed the same value by the callers below, so behavior matches,
            # but the closure capture is load-bearing here.
            libraryXml = mx.XMLDoc()

            libraryXml.open('component', attributes={'name': 'libraryTable'})
            libraryXml.open('library', attributes={'name': name})
            libraryXml.open('CLASSES')
            pathX = mx.relpath_or_absolute(path, suite_dir, prefix='$PROJECT_DIR$')
            libraryXml.element('root', attributes={'url': 'jar://' + pathX + '!/'})
            libraryXml.close('CLASSES')
            libraryXml.element('JAVADOC')
            if sourcePath:
                libraryXml.open('SOURCES')
                if os.path.isdir(sourcePath):
                    sourcePathX = mx.relpath_or_absolute(sourcePath, suite_dir, prefix='$PROJECT_DIR$')
                    libraryXml.element('root', attributes={'url': 'file://' + sourcePathX})
                else:
                    source_pathX = mx.relpath_or_absolute(source_path, suite_dir, prefix='$PROJECT_DIR$')
                    libraryXml.element('root', attributes={'url': 'jar://' + source_pathX + '!/'})
                libraryXml.close('SOURCES')
            else:
                libraryXml.element('SOURCES')
            libraryXml.close('library')
            libraryXml.close('component')
            libraryFile = join(librariesDirectory, _intellij_library_file_name(name, unique_library_file_names))
            return mx.update_file(libraryFile, libraryXml.xml(indent='  ', newl='\n'))

        # Setup the libraries that were used above
        for library in libraries:
            sourcePath = None
            if library.isLibrary():
                path = library.get_path(True)
                if library.sourcePath:
                    sourcePath = library.get_source_path(True)
            elif library.isMavenProject():
                path = library.get_path(True)
                sourcePath = library.get_source_path(True)
            elif library.isJARDistribution():
                path = library.path
                if library.sourcesPath:
                    sourcePath = library.sourcesPath
            elif library.isClasspathDependency():
                path = library.classpath_repr()
            else:
                mx.abort('Dependency not supported: {} ({})'.format(library.name, library.__class__.__name__))
            make_library(library.name, path, sourcePath, s.dir)

        jdk = mx.get_jdk()
        updated = False
        for library in jdk_libraries:
            if library.classpath_repr(jdk) is not None:
                if make_library(library.name, library.classpath_repr(jdk), library.get_source_path(jdk), s.dir):
                    updated = True
        if jdk_libraries and updated:
            mx.log("Setting up JDK libraries using {0}".format(jdk))

        # Set annotation processor profiles up, and link them to modules in compiler.xml
        compilerXml.open('component', attributes={'name': 'CompilerConfiguration'})

        compilerXml.element('option', attributes={'name': "DEFAULT_COMPILER", 'value': 'Javac'})
        # using the --release option with javac interferes with using --add-modules which is required for some projects
        compilerXml.element('option', attributes={'name': "USE_RELEASE_OPTION", 'value': 'false'})
        compilerXml.element('resourceExtensions')
        compilerXml.open('wildcardResourcePatterns')
        compilerXml.element('entry', attributes={'name': '!?*.java'})
        compilerXml.close('wildcardResourcePatterns')
        if annotationProcessorProfiles:
            compilerXml.open('annotationProcessing')
            for t, modules in sorted(annotationProcessorProfiles.items()):
                source_gen_dir = t[0]
                processors = t[1:]
                compilerXml.open('profile', attributes={'default': 'false', 'name': '-'.join([ap.name for ap in processors]) + "-" + source_gen_dir, 'enabled': 'true'})
                compilerXml.element('sourceOutputDir', attributes={'name': join(os.pardir, source_gen_dir)})
                compilerXml.element('sourceTestOutputDir', attributes={'name': join(os.pardir, source_gen_dir)})
                compilerXml.open('processorPath', attributes={'useClasspath': 'false'})

                # IntelliJ supports both directories and jars on the annotation processor path whereas
                # Eclipse only supports jars.
                for apDep in processors:
                    def processApDep(dep, edge):
                        if dep.isLibrary() or dep.isJARDistribution():
                            compilerXml.element('entry', attributes={'name': mx.relpath_or_absolute(dep.path, s.dir, prefix='$PROJECT_DIR$')})
                        elif dep.isProject():
                            compilerXml.element('entry', attributes={'name': mx.relpath_or_absolute(dep.output_dir(), s.dir, prefix='$PROJECT_DIR$')})
                    apDep.walk_deps(visit=processApDep)
                compilerXml.close('processorPath')
                for module in modules:
                    compilerXml.element('module', attributes={'name': module.name})
                compilerXml.close('profile')
            compilerXml.close('annotationProcessing')

        compilerXml.close('component')

    if compilerXml:
        compilerXml.close('project')
        compilerFile = join(ideaProjectDirectory, 'compiler.xml')
        mx.update_file(compilerFile, compilerXml.xml(indent='  ', newl='\n'))

    if not module_files_only:
        # Write misc.xml for global JDK config
        miscXml = mx.XMLDoc()
        miscXml.open('project', attributes={'version' : '4'})

        if java_modules:
            mainJdk = mx.get_jdk()
            miscXml.open('component', attributes={'name' : 'ProjectRootManager', 'version': '2', 'languageLevel': _complianceToIntellijLanguageLevel(mainJdk.javaCompliance), 'project-jdk-name': intellij_get_java_sdk_name(sdks, mainJdk), 'project-jdk-type': intellij_java_sdk_type})
            miscXml.element('output', attributes={'url' : 'file://$PROJECT_DIR$/' + os.path.relpath(s.get_output_root(), s.dir)})
            miscXml.close('component')
        else:
            miscXml.element('component', attributes={'name' : 'ProjectRootManager', 'version': '2', 'project-jdk-name': intellij_get_python_sdk_name(sdks), 'project-jdk-type': intellij_python_sdk_type})

        miscXml.close('project')
        miscFile = join(ideaProjectDirectory, 'misc.xml')
        mx.update_file(miscFile, miscXml.xml(indent='  ', newl='\n'))

        # Generate a default configuration for debugging Graal
        runConfig = mx.XMLDoc()
        runConfig.open('component', attributes={'name' : 'ProjectRunConfigurationManager'})
        runConfig.open('configuration', attributes={'default' :'false', 'name' : 'GraalDebug', 'type' : 'Remote', 'factoryName': 'Remote'})
        runConfig.element('option', attributes={'name' : 'USE_SOCKET_TRANSPORT', 'value' : 'true'})
        runConfig.element('option', attributes={'name' : 'SERVER_MODE', 'value' : 'false'})
        runConfig.element('option', attributes={'name' : 'SHMEM_ADDRESS', 'value' : 'javadebug'})
        runConfig.element('option', attributes={'name' : 'HOST', 'value' : 'localhost'})
        runConfig.element('option', attributes={'name' : 'PORT', 'value' : '8000'})
        runConfig.open('RunnerSettings', attributes={'RunnerId' : 'Debug'})
        runConfig.element('option', attributes={'name' : 'DEBUG_PORT', 'value' : '8000'})
        runConfig.element('option', attributes={'name' : 'LOCAL', 'value' : 'false'})
        runConfig.close('RunnerSettings')
        runConfig.element('method')
        runConfig.close('configuration')
        runConfig.close('component')
        runConfigFile = join(ideaProjectDirectory, 'runConfigurations', 'GraalDebug.xml')
        mx.ensure_dir_exists(join(ideaProjectDirectory, 'runConfigurations'))
        mx.update_file(runConfigFile, runConfig.xml(indent='  ', newl='\n'))

        if java_modules:
            # Eclipse formatter config
            corePrefsSources = s.eclipse_settings_sources().get('org.eclipse.jdt.core.prefs')
            uiPrefsSources = s.eclipse_settings_sources().get('org.eclipse.jdt.ui.prefs')
            if corePrefsSources:
                miscXml = mx.XMLDoc()
                miscXml.open('project', attributes={'version' : '4'})
                out = StringIO()
                print('# GENERATED -- DO NOT EDIT', file=out)
                for source in corePrefsSources:
                    print('# Source:', source, file=out)
                    with open(source) as fileName:
                        for line in fileName:
                            if line.startswith('org.eclipse.jdt.core.formatter.'):
                                print(line.strip(), file=out)
                formatterConfigFile = join(ideaProjectDirectory, 'EclipseCodeFormatter.prefs')
                mx.update_file(formatterConfigFile, out.getvalue())
                importConfigFile = None
                if uiPrefsSources:
                    out = StringIO()
                    print('# GENERATED -- DO NOT EDIT', file=out)
                    for source in uiPrefsSources:
                        print('# Source:', source, file=out)
                        with open(source) as fileName:
                            for line in fileName:
                                if line.startswith('org.eclipse.jdt.ui.importorder') \
                                        or line.startswith('org.eclipse.jdt.ui.ondemandthreshold') \
                                        or line.startswith('org.eclipse.jdt.ui.staticondemandthreshold'):
                                    print(line.strip(), file=out)
                    importConfigFile = join(ideaProjectDirectory, 'EclipseImports.prefs')
                    mx.update_file(importConfigFile, out.getvalue())
                miscXml.open('component', attributes={'name' : 'EclipseCodeFormatterProjectSettings'})
                miscXml.open('option', attributes={'name' : 'projectSpecificProfile'})
                miscXml.open('ProjectSpecificProfile')
                miscXml.element('option', attributes={'name' : 'formatter', 'value' : 'ECLIPSE'})
                custom_eclipse_exe = mx.get_env('ECLIPSE_EXE')
                if custom_eclipse_exe:
                    custom_eclipse = dirname(custom_eclipse_exe)
                    if mx.is_darwin():
                        custom_eclipse = join(dirname(custom_eclipse), 'Eclipse')
                    if not exists(custom_eclipse_exe):
                        mx.abort('Custom eclipse "{}" does not exist'.format(custom_eclipse_exe))
                    miscXml.element('option', attributes={'name' : 'eclipseVersion', 'value' : 'CUSTOM'})
                    miscXml.element('option', attributes={'name' : 'pathToEclipse', 'value' : custom_eclipse})
                miscXml.element('option', attributes={'name' : 'pathToConfigFileJava', 'value' : '$PROJECT_DIR$/.idea/' + basename(formatterConfigFile)})
                if importConfigFile:
                    miscXml.element('option', attributes={'name' : 'importOrderConfigFilePath', 'value' : '$PROJECT_DIR$/.idea/' + basename(importConfigFile)})
                    miscXml.element('option', attributes={'name' : 'importOrderFromFile', 'value' : 'true'})
                miscXml.close('ProjectSpecificProfile')
                miscXml.close('option')
                miscXml.close('component')
                miscXml.close('project')
                miscFile = join(ideaProjectDirectory, 'eclipseCodeFormatter.xml')
                mx.update_file(miscFile, miscXml.xml(indent='  ', newl='\n'))

        if java_modules:
            # Write codestyle settings
            mx.ensure_dir_exists(join(ideaProjectDirectory, 'codeStyles'))

            codeStyleConfigXml = mx.XMLDoc()
            codeStyleConfigXml.open('component', attributes={'name': 'ProjectCodeStyleConfiguration'})
            codeStyleConfigXml.open('state')
            codeStyleConfigXml.element('option', attributes={'name': 'USE_PER_PROJECT_SETTINGS', 'value': 'true'})
            codeStyleConfigXml.close('state')
            codeStyleConfigXml.close('component')
            codeStyleConfigFile = join(ideaProjectDirectory, 'codeStyles', 'codeStyleConfig.xml')
            mx.update_file(codeStyleConfigFile, codeStyleConfigXml.xml(indent='  ', newl='\n'))

            codeStyleProjectXml = mx.XMLDoc()
            codeStyleProjectXml.open('component', attributes={'name': 'ProjectCodeStyleConfiguration'})
            codeStyleProjectXml.open('code_scheme', attributes={'name': 'Project', 'version': '173'})
            codeStyleProjectXml.open('JavaCodeStyleSettings')
            # We cannot entirely disable wildcards import, but we can set the threshold to an insane number.
            codeStyleProjectXml.element('option', attributes={'name': 'CLASS_COUNT_TO_USE_IMPORT_ON_DEMAND', 'value': '65536'})
            codeStyleProjectXml.element('option', attributes={'name': 'NAMES_COUNT_TO_USE_IMPORT_ON_DEMAND', 'value': '65536'})
            codeStyleProjectXml.close('JavaCodeStyleSettings')
            codeStyleProjectXml.close('code_scheme')
            codeStyleProjectXml.close('component')
            codeStyleProjectFile = join(ideaProjectDirectory, 'codeStyles', 'Project.xml')
            mx.update_file(codeStyleProjectFile, codeStyleProjectXml.xml(indent='  ', newl='\n'))

        # Write checkstyle-idea.xml for the CheckStyle-IDEA
        checkstyleXml = mx.XMLDoc()
        checkstyleXml.open('project', attributes={'version': '4'})
        checkstyleXml.open('component', attributes={'name': 'CheckStyle-IDEA'})
        checkstyleXml.open('option', attributes={'name' : "configuration"})
        checkstyleXml.open('map')

        if max_checkstyle_version:
            checkstyleXml.element('entry', attributes={'key': "checkstyle-version", 'value': str(max_checkstyle_version)})

        # Initialize an entry for each style that is used
        checkstyleConfigs = set([])
        for p in s.projects_recursive():
            if not p.isJavaProject():
                continue
            csConfig, checkstyleVersion, checkstyleProj = p.get_checkstyle_config()
            if not csConfig or csConfig in checkstyleConfigs:
                continue
            checkstyleConfigs.add(csConfig)
            checkstyleXml.element('entry', attributes={'key' : "location-" + str(len(checkstyleConfigs)), 'value': "PROJECT_RELATIVE:" + join(checkstyleProj.dir, ".checkstyle_checks.xml") + ":" + checkstyleProj.name})

        checkstyleXml.close('map')
        checkstyleXml.close('option')
        checkstyleXml.close('component')
        checkstyleXml.close('project')
        checkstyleFile = join(ideaProjectDirectory, 'checkstyle-idea.xml')
        mx.update_file(checkstyleFile, checkstyleXml.xml(indent='  ', newl='\n'))

        # mx integration
        def antTargetName(dist):
            return 'archive_' + dist.name

        def artifactFileName(dist):
            return dist.name.replace('.', '_').replace('-', '_') + '.xml'
        validDistributions = [dist for dist in mx.sorted_dists() if not dist.suite.isBinarySuite() and not dist.isTARDistribution()]

        # 1) Make an ant file for archiving the distributions.
        antXml = mx.XMLDoc()
        antXml.open('project', attributes={'name': s.name, 'default': 'archive'})
        for dist in validDistributions:
            antXml.open('target', attributes={'name': antTargetName(dist)})
            antXml.open('exec', attributes={'executable': sys.executable})
            antXml.element('arg', attributes={'value': join(mx._mx_home, 'mx.py')})
            antXml.element('arg', attributes={'value': 'archive'})
            antXml.element('arg', attributes={'value': '@' + dist.name})
            antXml.close('exec')
            antXml.close('target')

        antXml.close('project')
        antFile = join(ideaProjectDirectory, 'ant-mx-archive.xml')
        mx.update_file(antFile, antXml.xml(indent='  ', newl='\n'))

        # 2) Tell IDEA that there is an ant-build.
        ant_mx_archive_xml = 'file://$PROJECT_DIR$/.idea/ant-mx-archive.xml'
        metaAntXml = mx.XMLDoc()
        metaAntXml.open('project', attributes={'version': '4'})
        metaAntXml.open('component', attributes={'name': 'AntConfiguration'})
        metaAntXml.open('buildFile', attributes={'url': ant_mx_archive_xml})
        metaAntXml.close('buildFile')
        metaAntXml.close('component')
        metaAntXml.close('project')
        metaAntFile = join(ideaProjectDirectory, 'ant.xml')
        mx.update_file(metaAntFile, metaAntXml.xml(indent='  ', newl='\n'))

        # 3) Make an artifact for every distribution
        validArtifactNames = {artifactFileName(dist) for dist in validDistributions}
        artifactsDir = join(ideaProjectDirectory, 'artifacts')
        mx.ensure_dir_exists(artifactsDir)
        # remove stale artifact files from previous runs
        for fileName in os.listdir(artifactsDir):
            filePath = join(artifactsDir, fileName)
            if os.path.isfile(filePath) and fileName not in validArtifactNames:
                os.remove(filePath)

        for dist in validDistributions:
            artifactXML = mx.XMLDoc()
            artifactXML.open('component', attributes={'name': 'ArtifactManager'})
            artifactXML.open('artifact', attributes={'build-on-make': 'true', 'name': dist.name})
            artifactXML.open('output-path', data='$PROJECT_DIR$/mxbuild/artifacts/' + dist.name)
            artifactXML.close('output-path')
            artifactXML.open('properties', attributes={'id': 'ant-postprocessing'})
            artifactXML.open('options', attributes={'enabled': 'true'})
            artifactXML.open('file', data=ant_mx_archive_xml)
            artifactXML.close('file')
            artifactXML.open('target', data=antTargetName(dist))
            artifactXML.close('target')
            artifactXML.close('options')
            artifactXML.close('properties')
            artifactXML.open('root', attributes={'id': 'root'})
            for javaProject in [dep for dep in dist.archived_deps() if dep.isJavaProject()]:
                artifactXML.element('element', attributes={'id': 'module-output', 'name': javaProject.name})
            for javaProject in [dep for dep in dist.deps if dep.isLibrary() or dep.isDistribution()]:
                artifactXML.element('element', attributes={'id': 'artifact', 'artifact-name': javaProject.name})
            artifactXML.close('root')
            artifactXML.close('artifact')
            artifactXML.close('component')

            artifactFile = join(artifactsDir, artifactFileName(dist))
            mx.update_file(artifactFile, artifactXML.xml(indent='  ', newl='\n'))

        def intellij_scm_name(vc_kind):
            # Map mx VC kind to IntelliJ's VCS plugin identifier.
            if vc_kind == 'git':
                return 'Git'
            elif vc_kind == 'hg':
                return 'hg4idea'

        vcsXml = mx.XMLDoc()
        vcsXml.open('project', attributes={'version': '4'})
        vcsXml.open('component', attributes={'name': 'VcsDirectoryMappings'})

        suites_for_vcs = mx.suites() + ([mx._mx_suite] if mx_python_modules else [])
        sourceSuitesWithVCS = [vc_suite for vc_suite in suites_for_vcs if vc_suite.isSourceSuite() and vc_suite.vc is not None]
        uniqueSuitesVCS = {(vc_suite.vc_dir, vc_suite.vc.kind) for vc_suite in sourceSuitesWithVCS}
        for vcs_dir, kind in uniqueSuitesVCS:
            vcsXml.element('mapping', attributes={'directory': vcs_dir, 'vcs': intellij_scm_name(kind)})

        vcsXml.close('component')
        vcsXml.close('project')

        vcsFile = join(ideaProjectDirectory, 'vcs.xml')
        mx.update_file(vcsFile, vcsXml.xml(indent='  ', newl='\n'))
def _intellij_external_project(externalProjects, sdks, host):
    """Generate IntelliJ module (.iml) files for externally-defined projects.

    `externalProjects` maps a project name to a definition dict with keys
    'path' and 'type' (mandatory) plus optional 'source', 'test', 'excluded'
    and 'load_path' lists. `host` supplies the base directory ('path' is
    resolved relative to `host.dir`). `sdks` is passed through to the
    intellij_get_*_sdk_name helpers to pick the ruby/python SDK.

    NOTE(review): this function also reads `module_files_only`,
    `declared_modules`, `modulesXml` and `s` — presumably closure variables of
    an enclosing IDE-init function not visible here; verify before moving it.
    """
    if externalProjects:
        for project_name, project_definition in externalProjects.items():
            # 'path' and 'type' are mandatory; anything outside `supported` is rejected.
            if not project_definition.get('path', None):
                mx.abort("external project {} is missing path attribute".format(project_name))
            if not project_definition.get('type', None):
                mx.abort("external project {} is missing type attribute".format(project_name))
            supported = ['path', 'type', 'source', 'test', 'excluded', 'load_path']
            unknown = set(project_definition.keys()) - frozenset(supported)
            if unknown:
                mx.abort("There are unsupported {} keys in {} external project".format(unknown, project_name))
            path = os.path.realpath(join(host.dir, project_definition["path"]))
            module_type = project_definition["type"]
            moduleXml = mx.XMLDoc()
            # Map the declared type onto IntelliJ's module-type identifiers.
            moduleXml.open('module', attributes={'type': {'ruby': 'RUBY_MODULE', 'python': 'PYTHON_MODULE', 'web': 'WEB_MODULE'}.get(module_type, 'UKNOWN_MODULE'), 'version': '4'})
            moduleXml.open('component', attributes={'name': 'NewModuleRootManager', 'inherit-compiler-output': 'true'})
            moduleXml.element('exclude-output')
            moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$'})
            # Register source and test roots, then any excluded folders that exist on disk.
            for name in project_definition.get('source', []):
                moduleXml.element('sourceFolder', attributes={'url':'file://$MODULE_DIR$/' + name, 'isTestSource': str(False)})
            for name in project_definition.get('test', []):
                moduleXml.element('sourceFolder', attributes={'url':'file://$MODULE_DIR$/' + name, 'isTestSource': str(True)})
            for name in project_definition.get('excluded', []):
                # _intellij_exclude_if_exists expects an object with a `dir` attribute;
                # a throwaway class instance stands in for a project here.
                _intellij_exclude_if_exists(moduleXml, type('', (object,), {"dir": path})(), name)
            moduleXml.close('content')
            # Attach the matching SDK order entry; only ruby/python/web types are supported.
            if module_type == "ruby":
                moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_ruby_sdk_type, 'jdkName': intellij_get_ruby_sdk_name(sdks)})
            elif module_type == "python":
                moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_python_sdk_type, 'jdkName': intellij_get_python_sdk_name(sdks)})
            elif module_type == "web":
                # nothing to do
                pass
            else:
                mx.abort("External project type {} not supported".format(module_type))
            moduleXml.element('orderEntry', attributes={'type': 'sourceFolder', 'forTests': 'false'})
            moduleXml.close('component')
            load_paths = project_definition.get('load_path', [])
            if load_paths:
                # Ruby-only feature: RModuleSettingsStorage stores numbered load-path entries.
                if not module_type == "ruby":
                    mx.abort("load_path is supported only for ruby type external project")
                moduleXml.open('component', attributes={'name': 'RModuleSettingsStorage'})
                load_paths_attributes = {}
                load_paths_attributes['number'] = str(len(load_paths))
                for i, name in enumerate(load_paths):
                    load_paths_attributes["string" + str(i)] = "$MODULE_DIR$/" + name
                moduleXml.element('LOAD_PATH', load_paths_attributes)
                moduleXml.close('component')
            moduleXml.close('module')
            moduleFile = join(path, project_name + '.iml')
            mx.update_file(moduleFile, moduleXml.xml(indent=' ', newl='\n'))
            if not module_files_only:
                # Record the module in the project-level modules.xml as well.
                declared_modules.add(project_name)
                moduleFilePath = "$PROJECT_DIR$/" + os.path.relpath(moduleFile, s.dir)
                modulesXml.element('module', attributes={'fileurl': 'file://' + moduleFilePath, 'filepath': moduleFilePath})
def build(self):
    """Compile the project's source programs (.c/.wat/.wasm) into the output dir.

    For each source file this produces (when stale): the .wasm binary (via
    emcc for .c, wat2wasm for .wat, plain copy for .wasm), copies of optional
    .result/.opts side files, a .wat disassembly for debugging, and — for
    benchmark projects — a native gcc-built binary. Finally a
    `wasm_test_index` file listing the programs is written per subdirectory.

    NOTE(review): `emcc_dir`, `gcc_dir`, `wabt_dir`, `_suite` and
    NATIVE_BENCH_DIR are module-level values configured elsewhere — presumably
    from EMCC_DIR/WABT_DIR environment settings; confirm against the module top.
    """
    source_dir = self.subject.getSourceDir()
    output_dir = self.subject.getOutputDir()
    # Fail fast if the required toolchains are absent or broken.
    if not emcc_dir:
        mx.abort("No EMCC_DIR specified - the source programs will not be compiled to .wasm.")
    emcc_cmd = os.path.join(emcc_dir, "emcc")
    gcc_cmd = os.path.join(gcc_dir, "gcc")
    if mx.run([emcc_cmd, "-v"], nonZeroIsFatal=False) != 0:
        mx.abort("Could not check the emcc version.")
    if mx.run([gcc_cmd, "--version"], nonZeroIsFatal=False) != 0:
        mx.abort("Could not check the gcc version.")
    if not wabt_dir:
        mx.abort("Set WABT_DIR if you want the binary to include .wat files.")
    mx.log("Building files from the source dir: " + source_dir)
    cc_flags = ["-g2", "-O3"]
    include_flags = []
    if hasattr(self.project, "includeset"):
        include_flags = ["-I", os.path.join(_suite.dir, "includes", self.project.includeset)]
    emcc_flags = ["-s", "EXIT_RUNTIME=1", "-s", "STANDALONE_WASM", "-s", "WASM_BIGINT"] + cc_flags
    if self.project.isBenchmarkProject():
        # Benchmark methods must stay exported; emcc expects a JSON-ish list with double quotes.
        emcc_flags = emcc_flags + ["-s", "EXPORTED_FUNCTIONS=" + str(self.benchmark_methods()).replace("'", "\"") + ""]
    subdir_program_names = defaultdict(lambda: [])
    for root, filename in self.subject.getProgramSources():
        subdir = os.path.relpath(root, self.subject.getSourceDir())
        mx.ensure_dir_exists(os.path.join(output_dir, subdir))
        basename = remove_extension(filename)
        source_path = os.path.join(root, filename)
        output_wasm_path = os.path.join(output_dir, subdir, basename + ".wasm")
        output_js_path = os.path.join(output_dir, subdir, basename + ".js")
        # Rebuild only when the source is newer than the existing .wasm (or none exists).
        timestampedSource = mx.TimeStampFile(source_path)
        timestampedOutput = mx.TimeStampFile(output_wasm_path)
        mustRebuild = timestampedSource.isNewerThan(timestampedOutput) or not timestampedOutput.exists()
        # Step 1: build the .wasm binary.
        if mustRebuild:
            if filename.endswith(".c"):
                # This generates both a js file and a wasm file.
                # See https://github.com/emscripten-core/emscripten/wiki/WebAssembly-Standalone
                build_cmd_line = [emcc_cmd] + emcc_flags + [source_path, "-o", output_js_path] + include_flags
                if mx.run(build_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not build the wasm-only output of " + filename + " with emcc.")
            elif filename.endswith(".wat"):
                # Step 1: compile the .wat file to .wasm.
                wat2wasm_cmd = os.path.join(wabt_dir, "wat2wasm")
                build_cmd_line = [wat2wasm_cmd, "-o", output_wasm_path, source_path]
                if mx.run(build_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not translate " + filename + " to binary format.")
            elif filename.endswith(".wasm"):
                # Pre-built binaries are copied as-is.
                shutil.copyfile(source_path, output_wasm_path)
        else:
            mx.logv("skipping, file is up-to-date: " + source_path)
        # Step 2: copy the result file if it exists.
        result_path = os.path.join(root, basename + ".result")
        if os.path.isfile(result_path):
            result_output_path = os.path.join(output_dir, subdir, basename + ".result")
            shutil.copyfile(result_path, result_output_path)
        # Step 3: copy the opts file if it exists.
        opts_path = os.path.join(root, basename + ".opts")
        if os.path.isfile(opts_path):
            opts_output_path = os.path.join(output_dir, subdir, basename + ".opts")
            shutil.copyfile(opts_path, opts_output_path)
        output_wat_path = os.path.join(output_dir, subdir, basename + ".wat")
        if mustRebuild:
            if filename.endswith(".c"):
                # Step 4: produce the .wat files, for easier debugging.
                wasm2wat_cmd = os.path.join(wabt_dir, "wasm2wat")
                if mx.run([wasm2wat_cmd, "-o", output_wat_path, output_wasm_path], nonZeroIsFatal=False) != 0:
                    mx.abort("Could not compile .wat file for " + filename)
            elif filename.endswith(".wat"):
                # Step 4: copy the .wat file, for easier debugging.
                wat_path = os.path.join(root, basename + ".wat")
                shutil.copyfile(wat_path, output_wat_path)
        # Step 5: if this is a benchmark project, create native binaries too.
        if mustRebuild:
            if filename.endswith(".c"):
                mx.ensure_dir_exists(os.path.join(output_dir, subdir, NATIVE_BENCH_DIR))
                output_path = os.path.join(output_dir, subdir, NATIVE_BENCH_DIR, mx.exe_suffix(basename))
                link_flags = ["-lm"]
                gcc_cmd_line = [gcc_cmd] + cc_flags + [source_path, "-o", output_path] + include_flags + link_flags
                if mx.run(gcc_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not build the native binary of " + filename + ".")
                # Make the produced binary executable by the owner.
                os.chmod(output_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
            elif filename.endswith(".wat"):
                mx.warn("The .wat files are not translated to native binaries: " + filename)
        # Remember the source name.
        subdir_program_names[subdir].append(basename)
    # Emit one index file per subdirectory listing all built programs.
    for subdir in subdir_program_names:
        with open(os.path.join(output_dir, subdir, "wasm_test_index"), "w") as f:
            for name in subdir_program_names[subdir]:
                f.write(name)
                f.write("\n")
def _espresso_gate_runner(args, tasks):
    """Espresso-specific gate tasks: Jackpot lint, GraalVM dist-name check,
    and a consistency check of the committed mokapot C headers against the
    headers generated by Native Image for the Espresso library.
    """
    # Jackpot configuration is inherited from Truffle.
    with Task('Jackpot', tasks, tags=[EspressoTags.jackpot]) as t:
        if t:
            jackpot(['--fail-on-warnings'], suite=None, nonZeroIsFatal=True)
    with Task('Espresso: GraalVM dist names', tasks, tags=['names']) as t:
        if t:
            mx_sdk_vm.verify_graalvm_configs(suites=['espresso'])
    mokapot_header_gate_name = 'Verify consistency of mokapot headers'
    with Task(mokapot_header_gate_name, tasks, tags=[EspressoTags.verify]) as t:
        if t:
            run_instructions = "$ mx --dynamicimports=/substratevm --native-images=lib:javavm gate --all-suites --task '{}'".format(
                mokapot_header_gate_name)
            # The check only makes sense when the 'lib:javavm' image is actually built.
            if mx_sdk_vm_impl._skip_libraries(espresso_library_config):
                mx.abort("""\
The registration of the Espresso library ('lib:javavm') is skipped. Please run this gate as follows:
{}""".format(run_instructions))
            errors = False
            mokapot_dir = join(
                mx.project('com.oracle.truffle.espresso.mokapot').dir,
                'include')
            libjavavm_dir = mx.project(
                mx_sdk_vm_impl.GraalVmNativeImage.project_name(
                    espresso_library_config)).get_output_root()
            for header in ['libjavavm_dynamic.h', 'graal_isolate_dynamic.h']:
                committed_header = join(mokapot_dir, header)
                if not mx.exists(committed_header):
                    mx.abort(
                        "Cannot locate '{}'. Was the file moved or renamed?".
                        format(committed_header))
                generated_header = join(libjavavm_dir, header)
                if not mx.exists(generated_header):
                    mx.abort(
                        "Cannot locate '{}'. Did you forget to build? Example:\n'mx --dynamicimports=/substratevm --native-images=lib:javavm build'"
                        .format(generated_header))
                # Keep the leading C block-comment (copyright) of the committed header.
                committed_header_copyright = []
                with open(committed_header, 'r') as committed_header_file:
                    for line in committed_header_file.readlines():
                        if line == '/*\n' or line.startswith(
                                ' *') or line == '*/\n':
                            committed_header_copyright.append(line)
                        else:
                            break
                with open(generated_header, 'r') as generated_header_file:
                    generated_header_lines = []
                    for line in generated_header_file.readlines():
                        # Ignore definitions that are not needed for Espresso
                        if not line.startswith(
                                "typedef"
                        ) or "(*Espresso_" in line or "__graal" in line or "(*graal_" in line:
                            generated_header_lines.append(line)
                        else:
                            newline = generated_header_lines.pop(
                            )  # Remove newline before ignored declaration
                            assert newline == "\n"
                # update_file returns truthy when the file content changed (i.e. a mismatch).
                errors = errors or mx.update_file(
                    committed_header,
                    ''.join(committed_header_copyright +
                            generated_header_lines),
                    showDiff=True)
            if errors:
                mx.abort("""\
One or more header files in the include dir of the mokapot project ('{committed}/') do not match those generated by Native Image ('{generated}/').
To fix the issue, run this gate locally:
{instructions}
And adapt the code to the modified headers in '{committed}'.
""".format(committed=os.path.relpath(mokapot_dir, _suite.vc_dir),
           generated=os.path.relpath(libjavavm_dir, _suite.vc_dir),
           instructions=run_instructions))
def _get_toolchain(toolchain_name): if toolchain_name not in _toolchains: mx.abort("Toolchain '{}' does not exists! Known toolchains {}".format( toolchain_name, ", ".join(_toolchains.keys()))) return _toolchains[toolchain_name]
def _check_tool(self, tool): if tool not in self._supported_tools(): mx.abort( "The {} toolchain (defined by {}) does not support tool '{}'". format(self.name, self.dist, tool))
def _fastr_gate_runner(args, tasks):
    """FastR gate tasks: environment setup toggles, always-on diagnostics,
    style checks, basic unit/embedding tests, and package tests.

    The leading 'Setup*'/'GCTorture*' tasks only export FASTR_* environment
    variables that later tasks (and the launched R processes) consume.
    """
    with mx_gate.Task('Setup no specials', tasks, tags=[FastRGateTags.no_specials]) as t:
        if t:
            os.environ['FASTR_OPTION_UseSpecials'] = 'false'
    with mx_gate.Task('Setup no dsl cache', tasks, tags=[FastRGateTags.no_dsl_cache]) as t:
        if t:
            os.environ['FASTR_OPTION_DSLCacheSizeFactor'] = '0'
    with mx_gate.Task('SetupLLVM', tasks, tags=[FastRGateTags.llvm]) as t:
        if t:
            os.environ['FASTR_RFFI'] = 'llvm'
    with mx_gate.Task('GCTorture1', tasks, tags=[FastRGateTags.gc_torture1]) as t:
        if t:
            os.environ['FASTR_GCTORTURE'] = '1'
    with mx_gate.Task('GCTorture3', tasks, tags=[FastRGateTags.gc_torture3]) as t:
        if t:
            os.environ['FASTR_GCTORTURE'] = '3'
    with mx_gate.Task('VerySlowAsserts', tasks, tags=[FastRGateTags.very_slow_asserts]) as t:
        if t:
            os.environ['FASTR_TEST_VERY_SLOW_ASSERTS'] = 'true'

    '''
    The specific additional gates tasks provided by FastR.
    '''
    # Diagnostics that run on every gate invocation (tagged "always"):
    with mx_gate.Task('ExtSoftVersions', tasks, tags=[mx_gate.Tags.always]) as t:
        if t:
            new_env = os.environ.copy()
            new_env['R_DEFAULT_PACKAGES'] = 'base'
            run_r(['-q', '-e', 'extSoftVersion()'], 'R', env=new_env)
    with mx_gate.Task('LibsInfo', tasks, tags=[mx_gate.Tags.always]) as t:
        if t:
            mx.log("Libraries captured in FASTR_HOME/lib:")
            lib_dir = os.path.join(_fastr_suite.dir, 'lib')
            # macOS has no ldd; otool -L is the closest equivalent.
            ldd = ['otool', '-L'] if platform.system() == 'Darwin' else ['ldd']
            for f in os.listdir(lib_dir):
                full_path = os.path.join(lib_dir, f)
                mx.run(['file', full_path], nonZeroIsFatal=False)
                mx.log('---\nobjdump:')
                mx.run(['objdump', '-s', '--section', '.comment', full_path], nonZeroIsFatal=False)
                mx.log('---\nlinking info:')
                mx.run(ldd + [full_path], nonZeroIsFatal=False)
                mx.log('---------')

    # ---------------------------------
    # Style checks:

    # FastR has custom copyright check
    with mx_gate.Task('Copyright check', tasks, tags=[mx_gate.Tags.style]) as t:
        if t:
            if mx.checkcopyrights(['--primary']) != 0:
                t.abort('copyright errors')

    # check that the expected test output file is up to date
    with mx_gate.Task('UnitTests: ExpectedTestOutput file check', tasks, tags=[mx_gate.Tags.style]) as t:
        if t:
            mx_unittest.unittest([
                '-Dfastr.test.gen.expected=' + _test_srcdir(),
                '-Dfastr.test.check.expected=true'
            ] + _gate_unit_tests())

    # ----------------------------------
    # Basic tests:

    with mx_gate.Task(
            'UnitTests',
            tasks,
            tags=[FastRGateTags.basic_tests, FastRGateTags.unit_tests]) as t:
        if t:
            mx_unittest.unittest(_gate_noapps_unit_tests())
    with mx_gate.Task('Rembedded', tasks, tags=[FastRGateTags.basic_tests]) as t:
        if t:
            if rembedtest([]) != 0:
                t.abort("Rembedded tests failed")

    # ----------------------------------
    # Package tests:

    with mx_gate.Task('Recommended load test', tasks, tags=[FastRGateTags.recommended_load]) as t:
        if t:
            # Note: this is a convenience mx gate job for testing the loading of recommended packages
            # We also test the loading of recommended pkgs in the "graalvm-tests"
            if not os.path.exists(
                    os.path.join(_fastr_suite.dir, 'library', 'spatial')):
                mx.abort(
                    'Recommended packages seem to be not installed in FastR. Did you forget to build with FASTR_RELEASE=true?'
                )
            pkgs = [
                'codetools', 'MASS', 'boot', 'class', 'cluster', 'lattice',
                'nnet', 'spatial', 'Matrix', 'KernSmooth', 'foreign', 'nlme',
                'rpart', 'survival'
            ]
            # Creates code that looks like: require(codetools) && require(MASS) && ...
            require_stmts = ' && '.join(
                ['require(' + pkg + ')' for pkg in pkgs])
            # Exit status 42 signals that every package loaded successfully.
            test_load = 'if (!(' + require_stmts + ')) q(status=1) else q(status=42)'
            if run_r(['--vanilla', '-e', test_load], 'R', nonZeroIsFatal=False) != 42:
                mx.abort("Loading of recommended packages failed")

    with mx_gate.Task('Internal pkg test', tasks, tags=[FastRGateTags.internal_pkgs_test]) as t:
        if t:
            internal_pkg_tests()

    # CRAN packages are listed in files com.oracle.truffle.r.test.packages/gated0, gated1, ...
    # We loop over all such files and create a gate task for each of them
    # See also documentation in FastRGateTags.cran_pkgs_tests
    for i in range(1, 1000):
        list_file = os.path.join(
            _fastr_suite.dir,
            'com.oracle.truffle.r.test.packages/gated' + str(i))
        if not os.path.exists(list_file):
            break
        with mx_gate.Task('CRAN pkg test: ' + str(i), tasks, tags=[FastRGateTags.cran_pkgs_test + str(i)]) as t:
            if t:
                check_last = False if mx_gate.Task.tags is None else FastRGateTags.cran_pkgs_test_check_last in mx_gate.Task.tags  # pylint: disable=unsupported-membership-test
                if check_last:
                    # Guard against a gate config that no longer covers the newest gatedN file.
                    next_file = os.path.join(
                        _fastr_suite.dir,
                        'com.oracle.truffle.r.test.packages/gated' + str(i + 1))
                    if os.path.exists(next_file):
                        mx.abort(
                            "File %s exists, but the gate thinks that %s is the last file. Did you forget to update the gate configuration?"
                            % (next_file, list_file))
                cran_pkg_tests(list_file)
def __exit__(self, tp, value, tb): self.stdout_file.flush() self.stderr_file.flush() if self.exit_code == 0 and (tb is None): if self.current_stage == self.config.last_stage: self.bench_out( 'Successfully finished the last specified stage:' + ' ' + self.current_stage + ' for ' + self.final_image_name) else: mx.log('Successfully finished stage:' + ' ' + self.current_stage) self.separator_line() else: self.failed = True if self.exit_code is not None and self.exit_code != 0: mx.log( mx.colorize( 'Failed in stage ' + self.current_stage + ' for ' + self.final_image_name + ' with exit code ' + str(self.exit_code), 'red')) if self.stdout_path: mx.log( mx.colorize('--------- Standard output:', 'blue')) with open(self.stdout_path, 'r') as stdout: mx.log(stdout.read()) if self.stderr_path: mx.log(mx.colorize('--------- Standard error:', 'red')) with open(self.stderr_path, 'r') as stderr: mx.log(stderr.read()) if tb: mx.log( mx.colorize( 'Failed in stage ' + self.current_stage + ' with ', 'red')) print_tb(tb) self.separator_line() if len(self.stages_till_now) > 0: mx.log( mx.colorize( '--------- To run the failed benchmark execute the following: ', 'green')) mx.log(mx.current_mx_command()) if len(self.stages_till_now[:-1]) > 0: mx.log( mx.colorize( '--------- To only prepare the benchmark add the following to the previous command: ', 'green')) mx.log('-Dnative-image.benchmark.stages=' + ','.join(self.stages_till_now[:-1])) mx.log( mx.colorize( '--------- To only run the failed stage add the following to the previous command: ', 'green')) mx.log('-Dnative-image.benchmark.stages=' + self.current_stage) mx.log( mx.colorize( '--------- Additional params that can be used for the benchmark are with -Dnative-image.benchmark.<param>: ', 'green')) mx.log(', '.join(self.config.params)) self.separator_line() if self.non_zero_is_fatal: mx.abort('Exiting the benchmark due to the failure.') self.stdout_file.close() self.stderr_file.close() self.reset_stage()
def jlink_new_jdk(jdk, dst_jdk_dir, module_dists, root_module_names=None, missing_export_target_action='create', with_source=lambda x: True, vendor_info=None, dedup_legal_notices=True):
    """
    Uses jlink from `jdk` to create a new JDK image in `dst_jdk_dir` with `module_dists` and
    their dependencies added to the JDK image, replacing any existing modules of the same name.

    :param JDKConfig jdk: source JDK
    :param str dst_jdk_dir: path to use for the jlink --output option
    :param list module_dists: list of distributions defining modules
    :param list root_module_names: list of strings naming the module root set for the new JDK image.
                     The named modules must either be in `module_dists` or in `jdk`. If None, then
                     the root set will be all the modules in `module_dists` and `jdk`.
    :param str missing_export_target_action: the action to perform for a qualified export target that
                     is not present in `module_dists` and does not have a hash stored in java.base.
                     The choices are:
                       "create" - an empty module is created
                        "error" - raise an error
                           None - do nothing
    :param lambda with_source: returns True if the sources of a module distribution must be included in the new JDK
    :param dict vendor_info: values for the jlink vendor options added by JDK-8232080
    :param bool dedup_legal_notices: pass --dedup-legal-notices=error-if-not-same-content to jlink
    """
    assert callable(with_source)

    # Sanity checks: jlink needs a modular (9+), non-exploded JDK with a jmods dir.
    if jdk.javaCompliance < '9':
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' with jlink since it is not JDK 9 or later')

    exploded_java_base_module = join(jdk.home, 'modules', 'java.base')
    if exists(exploded_java_base_module):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since it appears to be a developer build with exploded modules')
    jimage = join(jdk.home, 'lib', 'modules')
    jmods_dir = join(jdk.home, 'jmods')
    if not isfile(jimage):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jimage + ' is missing or is not an ordinary file')
    if not isdir(jmods_dir):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jmods_dir + ' is missing or is not a directory')

    jdk_modules = {jmd.name: jmd for jmd in jdk.get_modules()}
    modules = [as_java_module(dist, jdk) for dist in module_dists]
    all_module_names = frozenset(
        list(jdk_modules.keys()) + [m.name for m in modules])

    # Read hashes stored in java.base (the only module in the JDK where hashes are stored)
    out = mx.LinesOutputCapture()
    mx.run([
        jdk.exe_path('jmod'), 'describe',
        jdk_modules['java.base'].get_jmod_path()
    ], out=out)
    lines = out.lines
    hashes = {}
    for line in lines:
        if line.startswith('hashes'):
            parts = line.split()
            assert len(
                parts
            ) == 4, 'expected hashes line to have 4 fields, got {} fields: {}'.format(
                len(parts), line)
            _, module_name, algorithm, hash_value = parts
            hashes[module_name] = (algorithm, hash_value)

    build_dir = mx.ensure_dir_exists(join(dst_jdk_dir + ".build"))
    try:
        # Handle targets of qualified exports that are not present in `modules`
        target_requires = {}
        for jmd in modules:
            for targets in jmd.exports.values():
                for target in targets:
                    if target not in all_module_names and target not in hashes:
                        target_requires.setdefault(target, set()).add(jmd.name)
        if target_requires and missing_export_target_action is not None:
            if missing_export_target_action == 'error':
                mx.abort(
                    'Target(s) of qualified exports cannot be resolved: ' +
                    '.'.join(target_requires.keys()))
            assert missing_export_target_action == 'create', 'invalid value for missing_export_target_action: ' + str(
                missing_export_target_action)

            # Synthesize an empty placeholder module for each unresolved export target:
            # compile its module-info, jar it, and build a jmod from it.
            extra_modules = []
            for name, requires in target_requires.items():
                module_jar = join(build_dir, name + '.jar')
                jmd = JavaModuleDescriptor(
                    name, {},
                    requires={module: [] for module in requires},
                    uses=set(),
                    provides={},
                    jarpath=module_jar)
                extra_modules.append(jmd)
                module_build_dir = mx.ensure_dir_exists(join(build_dir, name))
                module_info_java = join(module_build_dir, 'module-info.java')
                module_info_class = join(module_build_dir, 'module-info.class')
                with open(module_info_java, 'w') as fp:
                    print(jmd.as_module_info(), file=fp)
                mx.run([
                    jdk.javac, '-d', module_build_dir,
                    '--limit-modules=java.base,' + ','.join(jmd.requires.keys()),
                    '--module-path=' + os.pathsep.join(
                        (m.jarpath for m in modules)), module_info_java
                ])
                with ZipFile(module_jar, 'w') as zf:
                    zf.write(module_info_class, basename(module_info_class))
                if exists(jmd.get_jmod_path()):
                    os.remove(jmd.get_jmod_path())
                # The jmod launcher lives next to javac in the JDK bin directory.
                mx.run([
                    jdk.javac.replace('javac', 'jmod'), 'create',
                    '--class-path=' + module_build_dir, jmd.get_jmod_path()
                ])

            modules.extend(extra_modules)
            all_module_names = frozenset(
                list(jdk_modules.keys()) + [m.name for m in modules])

        # Extract src.zip from source JDK
        jdk_src_zip = join(jdk.home, 'lib', 'src.zip')
        dst_src_zip_contents = {}
        if isfile(jdk_src_zip):
            mx.logv('[Extracting ' + jdk_src_zip + ']')
            with ZipFile(jdk_src_zip, 'r') as zf:
                for name in zf.namelist():
                    if not name.endswith('/'):
                        dst_src_zip_contents[name] = zf.read(name)
        else:
            mx.warn("'{}' does not exist or is not a file".format(jdk_src_zip))

        # Edit lib/security/default.policy in java.base
        patched_java_base = join(build_dir, 'java.base.jmod')
        with open(join(jmods_dir, 'java.base.jmod'),
                  'rb') as src_f, open(patched_java_base, 'wb') as dst_f:
            # A jmod file is a 4-byte magic header followed by a zip archive.
            jmod_header = src_f.read(4)
            if len(jmod_header) != 4 or jmod_header != b'JM\x01\x00':
                raise mx.abort("Unexpected jmod header: " +
                               b2a_hex(jmod_header).decode('ascii'))
            dst_f.write(jmod_header)
            policy_result = 'not found'
            with ZipFile(src_f, 'r') as src_zip, ZipFile(
                    dst_f, 'w', src_zip.compression) as dst_zip:
                for i in src_zip.infolist():
                    if i.filename[-1] == '/':
                        continue
                    src_member = src_zip.read(i)
                    if i.filename == 'lib/security/default.policy':
                        # Append GraalVM grant clauses unless they are already present.
                        policy_result = 'unmodified'
                        if 'grant codeBase "jrt:/com.oracle.graal.graal_enterprise"'.encode(
                                'utf-8') not in src_member:
                            policy_result = 'modified'
                            src_member += """
grant codeBase "jrt:/com.oracle.graal.graal_enterprise" {
    permission java.security.AllPermission;
};
""".encode('utf-8')
                        if 'grant codeBase "jrt:/org.graalvm.truffle"'.encode(
                                'utf-8') not in src_member:
                            policy_result = 'modified'
                            src_member += """
grant codeBase "jrt:/org.graalvm.truffle" {
    permission java.security.AllPermission;
};

grant codeBase "jrt:/org.graalvm.sdk" {
    permission java.security.AllPermission;
};

grant codeBase "jrt:/org.graalvm.locator" {
  permission java.io.FilePermission "<<ALL FILES>>", "read";
  permission java.util.PropertyPermission "*", "read,write";
  permission java.lang.RuntimePermission "createClassLoader";
  permission java.lang.RuntimePermission "getClassLoader";
  permission java.lang.RuntimePermission "getenv.*";
};

grant codeBase "file:${java.home}/languages/-" {
    permission java.security.AllPermission;
};
""".encode('utf-8')
                    dst_zip.writestr(i, src_member)
            if policy_result == 'not found':
                raise mx.abort(
                    "Couldn't find `lib/security/default.policy` in " +
                    join(jmods_dir, 'java.base.jmod'))

        for jmd in modules:
            # Remove existing sources for all the modules that we include
            dst_src_zip_contents = {
                key: dst_src_zip_contents[key]
                for key in dst_src_zip_contents if not key.startswith(jmd.name)
            }

            if with_source(jmd.dist):
                # Add the sources that we can share.
                # Extract module sources
                jmd_src_zip = jmd.jarpath[0:-len('.jar')] + '.src.zip'
                if isfile(jmd_src_zip):
                    mx.logv('[Extracting ' + jmd_src_zip + ']')
                    with ZipFile(jmd_src_zip, 'r') as zf:
                        for name in zf.namelist():
                            if not name.endswith('/'):
                                dst_src_zip_contents[jmd.name + '/' + name] = zf.read(name)

                # Add module-info.java to sources
                dst_src_zip_contents[
                    jmd.name + '/module-info.java'] = jmd.as_module_info(
                        extras_as_comments=False)

        # Now build the new JDK image with jlink
        jlink = [jdk.javac.replace('javac', 'jlink')]

        if jdk_enables_jvmci_by_default(jdk):
            # On JDK 9+, +EnableJVMCI forces jdk.internal.vm.ci to be in the root set
            jlink += ['-J-XX:-EnableJVMCI', '-J-XX:-UseJVMCICompiler']
        if root_module_names is not None:
            missing = frozenset(root_module_names) - all_module_names
            if missing:
                mx.abort(
                    'Invalid module(s): {}.\nAvailable modules: {}'.format(
                        ','.join(missing), ','.join(sorted(all_module_names))))
            jlink.append('--add-modules=' + ','.join(root_module_names))
        else:
            jlink.append('--add-modules=' + ','.join(sorted(all_module_names)))

        # Our modules (and the patched java.base) take precedence over the JDK's jmods.
        module_path = patched_java_base + os.pathsep + jmods_dir
        if modules:
            module_path = os.pathsep.join(
                (m.get_jmod_path(respect_stripping=True)
                 for m in modules)) + os.pathsep + module_path
        jlink.append('--module-path=' + module_path)
        jlink.append('--output=' + dst_jdk_dir)

        # These options are derived from how OpenJDK runs jlink to produce the final runtime image.
        jlink.extend([
            '-J-XX:+UseSerialGC', '-J-Xms32M', '-J-Xmx512M',
            '-J-XX:TieredStopAtLevel=1'
        ])
        jlink.append('-J-Dlink.debug=true')
        if dedup_legal_notices:
            jlink.append('--dedup-legal-notices=error-if-not-same-content')
        jlink.append('--keep-packaged-modules=' + join(dst_jdk_dir, 'jmods'))

        if jdk_has_new_jlink_options(jdk):
            if jdk_omits_warning_for_jlink_set_ThreadPriorityPolicy(jdk):
                thread_priority_policy_option = ' -XX:ThreadPriorityPolicy=1'
            else:
                mx.logv('[Creating JDK without -XX:ThreadPriorityPolicy=1]')
                thread_priority_policy_option = ''

            if jdk_supports_enablejvmciproduct(jdk):
                if any(
                        (m.name == 'jdk.internal.vm.compiler' for m in modules)):
                    jlink.append(
                        '--add-options=-XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCIProduct -XX:-UnlockExperimentalVMOptions'
                        + thread_priority_policy_option)
                else:
                    # Don't default to using JVMCI as JIT unless Graal is being updated in the image.
                    # This avoids unexpected issues with using the out-of-date Graal compiler in
                    # the JDK itself.
                    jlink.append(
                        '--add-options=-XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCIProduct -XX:-UseJVMCICompiler -XX:-UnlockExperimentalVMOptions'
                        + thread_priority_policy_option)
            else:
                mx.logv('[Creating JDK without -XX:+EnableJVMCIProduct]')
                if thread_priority_policy_option:
                    jlink.append('--add-options=' +
                                 thread_priority_policy_option.strip())
        if vendor_info is not None:
            for name, value in vendor_info.items():
                jlink.append('--' + name + '=' + value)
        release_file = join(jdk.home, 'release')
        if isfile(release_file):
            jlink.append('--release-info=' + release_file)

        # TODO: investigate the options below used by OpenJDK to see if they should be used:
        # --order-resources: specifies order of resources in generated lib/modules file.
        #       This is apparently not so important if a CDS archive is available.
        # --generate-jli-classes: pre-generates a set of java.lang.invoke classes.
        #       See https://github.com/openjdk/jdk/blob/master/make/GenerateLinkOptData.gmk
        mx.logv('[Creating JDK image in {}]'.format(dst_jdk_dir))
        mx.run(jlink)

        # Write the merged src.zip (JDK sources + our module sources) into the new image.
        dst_src_zip = join(dst_jdk_dir, 'lib', 'src.zip')
        mx.logv('[Creating ' + dst_src_zip + ']')
        with ZipFile(dst_src_zip, 'w', compression=ZIP_DEFLATED,
                     allowZip64=True) as zf:
            for name, contents in sorted(dst_src_zip_contents.items()):
                zf.writestr(name, contents)

        mx.logv('[Copying static libraries]')
        lib_directory = join(jdk.home, 'lib', 'static')
        if exists(lib_directory):
            dst_lib_directory = join(dst_jdk_dir, 'lib', 'static')
            try:
                mx.copytree(lib_directory, dst_lib_directory)
            except shutil.Error as e:
                # On AArch64, there can be a problem in the copystat part
                # of copytree which occurs after file and directory copying
                # has successfully completed. Since the metadata doesn't
                # matter in this case, just ensure that the content was copied.
                for root, _, lib_files in os.walk(lib_directory):
                    relative_root = os.path.relpath(root, dst_lib_directory)
                    for lib in lib_files:
                        src_lib_path = join(root, lib)
                        dst_lib_path = join(dst_lib_directory, relative_root, lib)
                        if not exists(dst_lib_path):
                            mx.abort(
                                'Error copying static libraries: {} missing in {}{}Original copytree error: {}'
                                .format(join(relative_root, lib),
                                        dst_lib_directory, os.linesep, e))
                        src_lib_hash = mx.sha1OfFile(src_lib_path)
                        dst_lib_hash = mx.sha1OfFile(dst_lib_path)
                        if src_lib_hash != dst_lib_hash:
                            mx.abort(
                                'Error copying static libraries: {} (hash={}) and {} (hash={}) differ{}Original copytree error: {}'
                                .format(src_lib_path, src_lib_hash,
                                        dst_lib_path, dst_lib_hash,
                                        os.linesep, e))
        # Allow older JDK versions to work
        else:
            lib_prefix = mx.add_lib_prefix('')
            lib_suffix = mx.add_static_lib_suffix('')
            lib_directory = join(jdk.home, 'lib')
            dst_lib_directory = join(dst_jdk_dir, 'lib')
            for f in os.listdir(lib_directory):
                if f.startswith(lib_prefix) and f.endswith(lib_suffix):
                    lib_path = join(lib_directory, f)
                    if isfile(lib_path):
                        shutil.copy2(lib_path, dst_lib_directory)
    finally:
        if not mx.get_opts().verbose:
            # Preserve build directory so that javac command can be re-executed
            # by cutting and pasting verbose output.
            shutil.rmtree(build_dir)

    # Create CDS archive (https://openjdk.java.net/jeps/341).
    out = mx.OutputCapture()
    mx.logv('[Creating CDS shared archive]')
    if mx.run([
            mx.exe_suffix(join(dst_jdk_dir, 'bin', 'java')), '-Xshare:dump',
            '-Xmx128M', '-Xms128M'
    ], out=out, err=out, nonZeroIsFatal=False) != 0:
        mx.log(out.data)
        mx.abort('Error generating CDS shared archive')
def getArchivableResults(self, use_relpath=True, single=False):
    """Archiving is unsupported for this project kind; always aborts."""
    message = "Project '{}' cannot be archived".format(self.name)
    # mx.abort() raises, so the surrounding 'raise' never actually fires;
    # it is kept to make the non-returning intent explicit to readers.
    raise mx.abort(message)
def import_python_sources(args):
    """Refresh the inlined CPython/PyPy sources on the 'python-import' branch.

    Reads the copyrights overrides file to find files originally taken from
    CPython, copies their current versions from the --cpython/--pypy checkouts
    given on the command line, commits the result to the orphaned
    'python-import' branch and finally merges that branch back into HEAD.
    Interactive: pauses twice via raw_input for manual inspection.
    """
    # mappings for files that are renamed
    mapping = {
        "_memoryview.c": "memoryobject.c",
        "_cpython_sre.c": "_sre.c",
    }
    parser = ArgumentParser(prog='mx python-src-import')
    parser.add_argument('--cpython', action='store', help='Path to CPython sources', required=True)
    parser.add_argument('--pypy', action='store', help='Path to PyPy sources', required=True)
    parser.add_argument('--msg', action='store', help='Message for import update commit', required=True)
    args = parser.parse_args(args)
    python_sources = args.cpython
    pypy_sources = args.pypy
    import_version = args.msg
    print """
    So you think you want to update the inlined sources? Here is how it will go:

    1. We'll first check the copyrights check overrides file to identify the
       files taken from CPython and we'll remember that list. There's a mapping
       for files that were renamed, currently this includes:
    \t{0!r}\n
    2. We'll check out the "python-import" branch. This branch has only files
       that were inlined from CPython or PyPy. We'll use the sources given on
       the commandline for that. I hope those are in a state where that makes
       sense.

    3. We'll stop and wait to give you some time to check if the python-import
       branch looks as you expect. Then we'll commit the updated files to the
       python-import branch, push it, and move back to whatever your HEAD is
       now.

    4. We'll merge the python-import branch back into HEAD. Because these share
       a common ancestroy, git will try to preserve our patches to files, that
       is, copyright headers and any other source patches.

    5. !IMPORTANT! If files were inlined from CPython during normal development
       that were not first added to the python-import branch, you will get
       merge conflicts and git will tell you that the files was added on both
       branches. You probably should resolve these using:

           git checkout python-import -- path/to/file

       Then check the diff and make sure that any patches that we did to those
       files are re-applied.

    6. After the merge is completed and any direct merge conflicts are
       resolved, run this:

           mx python-checkcopyrights --fix

       This will apply copyrights to files that we're newly added from
       python-import.

    7. Run the tests and fix any remaining issues.
    """.format(mapping)
    raw_input("Got it?")

    # Files with a "python.copyright" marker in the overrides file are the
    # ones that were inlined from CPython.
    files = []
    with open(
            os.path.join(os.path.dirname(__file__), "copyrights",
                         "overrides")) as f:
        files = [
            line.split(",")[0] for line in f.read().split("\n")
            if len(line.split(",")) > 1 and line.split(",")[1] == "python.copyright"
        ]

    # move to orphaned branch with sources
    if SUITE.vc.isDirty(SUITE.dir):
        mx.abort("Working dir must be clean")
    tip = SUITE.vc.tip(SUITE.dir).strip()
    SUITE.vc.git_command(SUITE.dir, ["checkout", "python-import"])
    SUITE.vc.git_command(SUITE.dir, ["clean", "-fdx"])
    shutil.rmtree("graalpython")

    for inlined_file in files:
        # C files are mostly just copied
        original_file = None
        name = os.path.basename(inlined_file)
        name = mapping.get(name, name)
        if inlined_file.endswith(".h") or inlined_file.endswith(".c"):
            # NOTE(review): this inner loop rebinds the name 'files'; the
            # outer for-loop already holds its iterator so iteration is
            # unaffected, but the shadowing is easy to misread.
            for root, dirs, files in os.walk(python_sources):
                if os.path.basename(name) in files:
                    original_file = os.path.join(root, name)
                    try:
                        os.makedirs(os.path.dirname(inlined_file))
                    except:
                        # best-effort: directory may already exist
                        pass
                    shutil.copy(original_file, inlined_file)
                    break
        elif inlined_file.endswith(".py"):
            # these files don't need to be updated, they inline some unittest code only
            if name.startswith("test_") or name.endswith("_tests.py"):
                original_file = inlined_file
        if original_file is None:
            mx.warn("Could not update %s - original file not found" % inlined_file)

    # re-copy lib-python
    libdir = os.path.join(SUITE.dir, "graalpython/lib-python/3")
    shutil.copytree(os.path.join(pypy_sources, "lib-python", "3"), libdir)

    # commit and check back
    SUITE.vc.git_command(SUITE.dir, ["add", "."])
    raw_input(
        "Check that the updated files look as intended, then press RETURN...")
    SUITE.vc.commit(SUITE.dir, "Update Python inlined files: %s" % import_version)
    answer = raw_input("Should we push python-import (y/N)? ")
    if answer and answer in "Yy":
        SUITE.vc.git_command(SUITE.dir,
                             ["push", "origin", "python-import:python-import"])
    # Return to the original revision and merge the refreshed import branch.
    SUITE.vc.update(SUITE.dir, rev=tip)
    SUITE.vc.git_command(SUITE.dir, ["merge", "python-import"])
def graalpython_gate_runner(args, tasks):
    """Gate runner: each Task below only executes its body when the task's
    tags were selected for this gate run (``task`` is falsy otherwise)."""
    with Task('GraalPython JUnit', tasks, tags=[GraalPythonTags.junit]) as task:
        if task:
            punittest(['--verbose'])
    with Task('GraalPython Python tests', tasks, tags=[GraalPythonTags.unittest]) as task:
        if task:
            gate_unittests()
    with Task('GraalPython C extension tests', tasks, tags=[GraalPythonTags.cpyext]) as task:
        if task:
            # we deliberately added this to test the combination of Sulong and 'mx_unittest'
            unittest([
                '--regex',
                re.escape(
                    'com.oracle.graal.python.test.module.MemoryviewTest'),
                "-Dgraal.TraceTruffleCompilation=true"
            ])
            gate_unittests(subdir="cpyext/")
    with Task('GraalPython C extension managed tests', tasks, tags=[GraalPythonTags.cpyext_managed]) as task:
        if task:
            mx.run_mx([
                "--dynamicimports", "sulong-managed", "python-gate-unittests",
                "--llvm.configuration=managed", "--subdir=cpyext", "--"
            ])
    with Task('GraalPython C extension sandboxed tests', tasks, tags=[GraalPythonTags.cpyext_sandboxed]) as task:
        if task:
            mx.run_mx([
                "--dynamicimports", "sulong-managed", "python-gate-unittests",
                "--llvm.configuration=sandboxed", "--subdir=cpyext", "--"
            ])
    with Task('GraalPython Python tests on SVM', tasks, tags=[GraalPythonTags.svmunit]) as task:
        if task:
            # Reuse a previously built image if present, otherwise build one.
            svm_image_name = "./graalpython-svm"
            if not os.path.exists(svm_image_name):
                svm_image_name = python_svm(["-h"])
            llvm_home = mx_subst.path_substitutions.substitute(
                '--native.Dllvm.home=<path:SULONG_LIBS>')
            args = [
                "--python.CoreHome=%s" % os.path.join(SUITE.dir, "graalpython", "lib-graalpython"),
                "--python.StdLibHome=%s" % os.path.join(SUITE.dir, "graalpython", "lib-python/3"),
                llvm_home
            ]
            run_python_unittests(svm_image_name, args)
    with Task('GraalPython apptests', tasks, tags=[GraalPythonTags.apptests]) as task:
        if task:
            apprepo = os.environ["GRAALPYTHON_APPTESTS_REPO_URL"]
            # Pinned apptests suite revision; bump deliberately.
            _apptest_suite = SUITE.import_suite(
                "graalpython-apptests",
                version="1fc0e86a54cbe090d36f262c062d8f4eee8f2e6d",
                urlinfos=[
                    mx.SuiteImportURLInfo(mx_urlrewrites.rewriteurl(apprepo),
                                          "git", mx.vc_system("git"))
                ])
            mx.run_mx(["-p", _apptest_suite.dir, "graalpython-apptests"])
    with Task('GraalPython license header update', tasks, tags=[GraalPythonTags.license]) as task:
        if task:
            python_checkcopyrights([])
    with Task('GraalPython GraalVM shared-library build', tasks, tags=[GraalPythonTags.downstream, GraalPythonTags.graalvm]) as task:
        if task:
            run_shared_lib_test()
    with Task('GraalPython GraalVM build', tasks, tags=[GraalPythonTags.downstream, GraalPythonTags.graalvm]) as task:
        if task:
            svm_image = python_svm(["--version"])
            benchmark = os.path.join(PATH_MESO, "image-magix.py")
            out = mx.OutputCapture()
            mx.run([svm_image, benchmark],
                   nonZeroIsFatal=True,
                   out=mx.TeeOutputCapture(out))
            # Expected benchmark output; a mismatch means the image is broken.
            success = "\n".join([
                "[0, 0, 0, 0, 0, 0, 10, 10, 10, 0, 0, 10, 3, 10, 0, 0, 10, 10, 10, 0, 0, 0, 0, 0, 0]",
            ])
            if success not in out.data:
                mx.abort('Output from generated SVM image "' + svm_image +
                         '" did not match success pattern:\n' + success)
            llvm_home = mx_subst.path_substitutions.substitute(
                '--native.Dllvm.home=<path:SULONG_LIBS>')
            args = [
                "--python.CoreHome=%s" % os.path.join(SUITE.dir, "graalpython", "lib-graalpython"),
                "--python.StdLibHome=%s" % os.path.join(SUITE.dir, "graalpython", "lib-python/3"),
                llvm_home
            ]
            run_python_unittests(svm_image, args)
def __init__(self, suite, name, deps, workingSets, theLicense, **args):
    """Construct the archive project, requiring the 'outputDir', 'prefix'
    and 'results' attributes to be present in **args.

    Fix: pop with a default of None so that a missing attribute reaches the
    friendly mx.abort message below instead of raising an uncaught KeyError.
    """
    for attr in ['outputDir', 'prefix', 'results']:
        setattr(self, attr, args.pop(attr, None))
        if getattr(self, attr, None) is None:
            mx.abort("Missing '{}' attribute".format(attr),
                     context="GraalNodeJsArchiveProject {}".format(name))
    mx.ArchivableProject.__init__(self, suite, name, deps, workingSets, theLicense)
def to_int(self, string):
    """Parse *string* as an exact integer value.

    Accepts numeric strings such as "42" or "3.0"; aborts (via mx.abort)
    when the string is not numeric or has a fractional part.

    :param string: the directive text to parse
    :return: the integer value
    """
    try:
        fv = float(string)
    except ValueError:
        # Fix: previously a non-numeric string raised an uncaught
        # ValueError instead of the intended friendly abort.
        mx.abort("Cannot parse initialization directive: " + string)
    iv = int(fv)
    if float(iv) != fv:
        mx.abort("Cannot parse initialization directive: " + string)
    return iv
def _junit_r_harness(args, vmArgs, jdk, junitArgs):
    """Build the FastR junit RunListener argument string and launch the tests.

    :param args: parsed harness options (gen_fastr_output, check_expected_output, ...)
    :param vmArgs: extra JVM arguments (extended in place)
    :param jdk: the JDK to run with
    :param junitArgs: junit arguments (extended in place with --runlistener)
    :return: the exit status of the junit JVM (non-zero is not fatal)
    """
    # Refactor: collect options in a list and join with ',' instead of the
    # original stateful add_arg_separator() accumulation (a Python 2.7
    # closure workaround); the resulting string is identical.
    # Always pass the directory where the expected output file should reside.
    opts = ['expected=' + _test_srcdir()]

    # there should not be any unparsed arguments at this stage
    if args.remainder:
        mx.abort('unexpected arguments: ' + str(args.remainder).strip('[]') +
                 '; did you forget --tests')

    if args.gen_fastr_output:
        opts.append('gen-fastr=' + args.gen_fastr_output)
    if args.check_expected_output:
        # checking implies generating the expected output first
        args.gen_expected_output = True
        opts.append('check-expected')
    if args.gen_expected_output:
        opts.append('gen-expected')
    if args.keep_trailing_whitespace:
        opts.append('keep-trailing-whitespace')
    if args.gen_expected_quiet:
        opts.append('gen-expected-quiet')
    if args.gen_diff_output:
        opts.append('gen-diff=' + args.gen_diff_output)
    if args.trace_tests:
        opts.append('trace-tests')
    opts.append('test-project-output-dir=' +
                mx.project('com.oracle.truffle.r.test').output_dir())
    runlistener_arg = ','.join(opts)

    # use a custom junit.RunListener
    runlistener = 'com.oracle.truffle.r.test.TestBase$RunListener'
    if len(runlistener_arg) > 0:
        runlistener += ':' + runlistener_arg
    junitArgs += ['--runlistener', runlistener]

    # on some systems a large Java stack seems necessary
    vmArgs += ['-Xss12m']
    # no point in printing errors to file when running tests (that contain errors on purpose)
    vmArgs += ['-DR:-PrintErrorStacktracesToFile']
    vmArgs += set_graal_options(nocompile=True)
    setREnvironment()
    return mx.run_java(vmArgs + junitArgs, nonZeroIsFatal=False, jdk=jdk)
def substitution_path():
    """Return the classpath of com.oracle.svm.bench, aborting when the
    substitutions for scala dacapo have not been built."""
    bench_classpath = mx.project('com.oracle.svm.bench').classpath_repr()
    if mx.exists(bench_classpath):
        return bench_classpath
    # mx.abort raises, so control never falls through here.
    mx.abort('Path to substitutions for scala dacapo not present: ' +
             bench_classpath + '. Did you build all of substratevm?')
    return bench_classpath
def _squeak(args, extra_vm_args=None, env=None, jdk=None, **kwargs):
    """run GraalSqueak

    Splits *args* into JVM args (before '--') and launcher args, parses the
    launcher options, assembles the JVM and Squeak command lines and runs the
    GraalSqueakLauncher via mx.run_java.
    """
    env = env if env else os.environ
    vm_args, raw_args = mx.extract_VM_args(args,
                                           useDoubleDash=True,
                                           defaultAllVMArgs=False)
    parser = argparse.ArgumentParser(prog='mx squeak')
    parser.add_argument('-A', '--assertions',
                        help='enable assertion',
                        dest='assertions',
                        action='store_true', default=False)
    parser.add_argument('-B', '--no-background',
                        help='disable background compilation',
                        dest='background_compilation',
                        action='store_false', default=True)
    parser.add_argument('-c', '--code',
                        help='Smalltalk code to be executed in headless mode',
                        dest='code')
    parser.add_argument('--cpusampler', help='enable CPU sampling',
                        dest='cpusampler', action='store_true', default=False)
    parser.add_argument('--cputracer', help='enable CPU tracing',
                        dest='cputracer', action='store_true', default=False)
    parser.add_argument('-d', '--disable-interrupts',
                        help='disable interrupt handler',
                        dest='disable_interrupts',
                        action='store_true', default=False)
    parser.add_argument('-etf', '--enable-transcript-forwarding',
                        help='Forward stdio to Transcript',
                        dest='enable_transcript_forwarding',
                        action='store_true', default=False)
    parser.add_argument('-fc', '--force-compilation',
                        help='compile immediately to test Truffle compiler',
                        dest='force_compilation',
                        action='store_true', default=False)
    parser.add_argument('--gc', action='store_true',
                        help='print garbage collection details')
    parser.add_argument('--graal-options', help='print Graal options',
                        dest='print_graal_options',
                        action='store_true', default=False)
    parser.add_argument('--headless', help='Run without a display',
                        dest='headless', action='store_true', default=False)
    parser.add_argument('--igv', action='store_true', help='dump to igv')
    parser.add_argument('--inspect', help='enable Chrome inspector',
                        dest='inspect', action='store_true', default=False)
    parser.add_argument('--jdk-ci-time',
                        help='collect timing information for compilation '
                        '(contains `JVMCI-native` when libgraal is used)',
                        dest='jdk_ci_time',
                        action='store_true', default=False)
    parser.add_argument('-l', '--low-level',
                        help='enable low-level optimization output',
                        dest='low_level', action='store_true', default=False)
    parser.add_argument('--log',
                        help='enable TruffleLogger for class, e.g.: '
                        '"%s.model.ArrayObject=FINER"' % PACKAGE_NAME,
                        dest='log')
    parser.add_argument('--machine-code',
                        help='print machine code',
                        dest='print_machine_code',
                        action='store_true', default=False)
    parser.add_argument('--memtracer',
                        help='enable Memory tracing',
                        dest='memtracer', action='store_true', default=False)
    parser.add_argument('--print-defaults',
                        help='print VM defaults',
                        dest='print_defaults',
                        action='store_true', default=False)
    parser.add_argument('-tc', '--trace-compilation',
                        help='trace Truffle compilation',
                        dest='trace_compilation',
                        action='store_true', default=False)
    parser.add_argument('-td', '--trace-deopts',
                        help='trace deoptimizations',
                        dest='deopts', action='store_true', default=False)
    parser.add_argument(
        '-ti', '--trace-invalid',
        help='trace assumption invalidation and transfers to interpreter',
        dest='trace_invalidation', action='store_true', default=False)
    parser.add_argument(
        '-tin', '--trace-inlining',
        help='print information for inlining for each compilation',
        dest='trace_inlining', action='store_true', default=False)
    parser.add_argument('-tio', '--trace-interop',
                        help='trace interop errors, ...',
                        dest='trace_interop',
                        action='store_true', default=False)
    parser.add_argument('-tif', '--trace-iterate-frames',
                        help='trace iterate frames',
                        dest='trace_iterate_frames',
                        action='store_true', default=False)
    parser.add_argument('-tpf', '--trace-primitive-failures',
                        help='trace primitive failures',
                        dest='trace_primitive_failures',
                        action='store_true', default=False)
    parser.add_argument('-tps', '--trace-process-switches',
                        help='trace Squeak process switches, ...',
                        dest='trace_process_switches',
                        action='store_true', default=False)
    parser.add_argument('-ts', '--trace-splitting',
                        help='print splitting summary on shutdown',
                        dest='trace_splitting',
                        action='store_true', default=False)
    parser.add_argument('-tcd', '--truffle-compilation-details',
                        help='print Truffle compilation details',
                        dest='truffle_compilation_details',
                        action='store_true', default=False)
    parser.add_argument(
        '-tcp', '--truffle-compilation-polymorphism',
        help='print all polymorphic and generic nodes after each compilation',
        dest='truffle_compilation_polymorphism',
        action='store_true', default=False)
    parser.add_argument(
        '-tcs', '--truffle-compilation-statistics',
        help='print Truffle compilation statistics at the end of a run',
        dest='truffle_compilation_stats',
        action='store_true', default=False)
    parser.add_argument('-teh', '--truffle-expansion-histogram',
                        help='print a histogram of all expanded Java methods',
                        dest='truffle_expansion_histogram',
                        action='store_true', default=False)
    parser.add_argument(
        '-tib', '--truffle-instrument-boundaries',
        help='instrument Truffle boundaries and output profiling information',
        dest='truffle_instrument_boundaries',
        action='store_true', default=False)
    parser.add_argument('-v', '--verbose',
                        help='enable verbose output',
                        dest='verbose', action='store_true', default=False)
    parser.add_argument('-w', '--perf-warnings',
                        help='enable performance warnings',
                        dest='perf_warnings',
                        action='store_true', default=False)
    parser.add_argument('image', help='path to Squeak image file', nargs='?')
    parser.add_argument('image_arguments', help='image arguments',
                        nargs=argparse.REMAINDER)
    parsed_args = parser.parse_args(raw_args)

    # Assemble JVM arguments.
    vm_args = BASE_VM_ARGS + _get_runtime_jvm_args(jdk)
    if _compiler:
        vm_args += _graal_vm_args(parsed_args)
    # default: assertion checking is enabled
    if parsed_args.assertions:
        vm_args += ['-ea', '-esa']
    if parsed_args.gc:
        vm_args += ['-XX:+PrintGC', '-XX:+PrintGCDetails']
    if parsed_args.jdk_ci_time:
        vm_args.append('-XX:+CITime')
    if parsed_args.print_defaults:
        vm_args.append('-XX:+PrintFlagsFinal')
    if extra_vm_args:
        vm_args += extra_vm_args
    if parsed_args.code:
        vm_args.append('-Djava.awt.headless=true')
    vm_args.append('%s.launcher.GraalSqueakLauncher' % PACKAGE_NAME)

    # Assemble launcher/image arguments.
    squeak_arguments = []
    if parsed_args.disable_interrupts:
        squeak_arguments.append('--%s.DisableInterruptHandler' % LANGUAGE_ID)
    if parsed_args.headless:
        squeak_arguments.append('--%s.Headless' % LANGUAGE_ID)
    if parsed_args.code:
        squeak_arguments.extend(['--code', parsed_args.code])
    if parsed_args.cpusampler:
        squeak_arguments.append('--cpusampler')
    if parsed_args.cputracer:
        squeak_arguments.append('--cputracer')
    if parsed_args.enable_transcript_forwarding:
        squeak_arguments.append('--enable-transcript-forwarding')
    if parsed_args.inspect:
        squeak_arguments.append('--inspect')
    # The trace-* shorthands below map onto TruffleLogger settings; note they
    # overwrite parsed_args.log, so the last matching flag wins.
    if parsed_args.trace_interop:
        parsed_args.log = ('%s.nodes.plugins.PolyglotPlugin=FINE' % PACKAGE_NAME)
    if parsed_args.trace_iterate_frames:
        parsed_args.log = ('%s.util.FrameAccess=FINE' % PACKAGE_NAME)
    if parsed_args.trace_primitive_failures:
        parsed_args.log = (
            '%s.nodes.bytecodes.MiscellaneousBytecodes$CallPrimitiveNode=FINE'
            % PACKAGE_NAME)
    if parsed_args.trace_process_switches:
        parsed_args.log = ('%s.nodes.ExecuteTopLevelContextNode=FINE' % PACKAGE_NAME)
    if parsed_args.log:
        split = parsed_args.log.split("=")
        if len(split) != 2:
            mx.abort('Must be in the format de.hpi.swa.graal...Class=LOGLEVEL')
        squeak_arguments.append('--log.%s.%s.level=%s' %
                                (LANGUAGE_ID, split[0], split[1]))
    if parsed_args.memtracer:
        squeak_arguments.extend(['--experimental-options', '--memtracer'])

    squeak_arguments.append('--polyglot')  # enable polyglot mode by default

    if parsed_args.image:
        squeak_arguments.append(parsed_args.image)
    else:
        # Any option besides the default '--polyglot' requires an image.
        if len(squeak_arguments) > 1:
            parser.error('an image needs to be explicitly provided')
    if parsed_args.image_arguments:
        squeak_arguments.extend(parsed_args.image_arguments)

    if not jdk:
        jdk = mx.get_jdk(tag='jvmci' if _compiler else None)
    return mx.run_java(vm_args + squeak_arguments, jdk=jdk, **kwargs)
def make_java_module(dist, jdk):
    """
    Creates a Java module from a distribution.

    :param JARDistribution dist: the distribution from which to create a module
    :param JDKConfig jdk: a JDK with a version >= 9 that can be used to compile the module-info class
    :return: the `JavaModuleDescriptor` for the created Java module
    """
    info = get_java_module_info(dist)
    if info is None:
        # Not a modular distribution.
        return None
    moduleName, moduleDir, moduleJar = info  # pylint: disable=unpacking-non-sequence
    mx.log('Building Java module ' + moduleName + ' from ' + dist.name)
    exports = {}
    requires = {}
    concealedRequires = {}
    uses = set()
    modulepath = list()
    usedModules = set()

    if dist.suite.getMxCompatibility().moduleDepsEqualDistDeps():
        moduledeps = dist.archived_deps()
        for dep in mx.classpath_entries(dist, includeSelf=False):
            if dep.isJARDistribution():
                # Recursively ensure dependency distributions are modules too.
                jmd = as_java_module(
                    dep, jdk, fatalIfNotCreated=False) or make_java_module(
                        dep, jdk)
                modulepath.append(jmd)
                requires[jmd.name] = {jdk.get_transitive_requires_keyword()}
            elif (dep.isJdkLibrary() or dep.isJreLibrary()) and dep.is_provided_by(jdk):
                pass
            else:
                mx.abort(dist.name + ' cannot depend on ' + dep.name +
                         ' as it does not define a module')
    else:
        moduledeps = get_module_deps(dist)

    # Append JDK modules to module path
    jdkModules = jdk.get_modules()
    if not isinstance(jdkModules, list):
        jdkModules = list(jdkModules)
    allmodules = modulepath + jdkModules

    javaprojects = [d for d in moduledeps if d.isJavaProject()]

    # Collect packages in the module first
    packages = set()
    for dep in javaprojects:
        packages.update(dep.defined_java_packages())

    for dep in javaprojects:
        uses.update(getattr(dep, 'uses', []))
        for pkg in getattr(dep, 'runtimeDeps', []):
            requires.setdefault(pkg, set(['static']))
        for pkg in itertools.chain(
                dep.imported_java_packages(projectDepsOnly=False),
                getattr(dep, 'imports', [])):
            # Only consider packages not defined by the module we're creating. This handles the
            # case where we're creating a module that will upgrade an existing upgradeable
            # module in the JDK such as jdk.internal.vm.compiler.
            if pkg not in packages:
                depModule, visibility = lookup_package(allmodules, pkg, moduleName)
                if depModule and depModule.name != moduleName:
                    requires.setdefault(depModule.name, set())
                    if visibility == 'exported':
                        # A distribution based module does not re-export its imported JDK packages
                        usedModules.add(depModule)
                    else:
                        assert visibility == 'concealed'
                        concealedRequires.setdefault(depModule.name, set()).add(pkg)
                        usedModules.add(depModule)

        # If an "exports" attribute is not present, all packages are exported
        for package in _expand_package_info(
                dep, getattr(dep, 'exports', dep.defined_java_packages())):
            exports.setdefault(package, [])

    provides = {}
    if exists(moduleDir):
        shutil.rmtree(moduleDir)
    for d in [dist] + [md for md in moduledeps if md.isJARDistribution()]:
        if d.isJARDistribution():
            with zipfile.ZipFile(d.path, 'r') as zf:
                # To compile module-info.java, all classes it references must either be given
                # as Java source files or already exist as class files in the output directory.
                # As such, the jar file for each constituent distribution must be unpacked
                # in the output directory.
                zf.extractall(path=moduleDir)
                names = frozenset(zf.namelist())

                # Flatten versioned resources
                versionsDir = join(moduleDir, 'META-INF', 'versions')
                if exists(versionsDir):
                    versionedRE = re.compile(
                        r'META-INF/versions/([1-9][0-9]*)/(.+)')
                    versions = {}
                    for arcname in sorted(names):
                        m = versionedRE.match(arcname)
                        if m:
                            version = int(m.group(1))
                            unversionedName = m.group(2)
                            versions.setdefault(
                                version, {})[unversionedName] = zf.read(arcname)
                    # Python 2 dict iteration (iteritems) — consistent with
                    # the rest of this file.
                    for version, resources in sorted(versions.iteritems()):
                        for unversionedName, content in resources.iteritems():
                            dst = join(moduleDir, unversionedName)
                            if version <= jdk.javaCompliance.value:
                                parent = dirname(dst)
                                if parent and not exists(parent):
                                    os.makedirs(parent)
                                with open(dst, 'wb') as fp:
                                    fp.write(content)
                            else:
                                # Ignore resource whose version is too high
                                pass
                    shutil.rmtree(versionsDir)

                    manifest = join(moduleDir, 'META-INF/MANIFEST.MF')
                    # Remove Multi-Release attribute from manifest as the jar
                    # is now flattened. This is also a workaround for
                    # https://bugs.openjdk.java.net/browse/JDK-8193802
                    if exists(manifest):
                        with open(manifest) as fp:
                            content = fp.readlines()
                        newContent = [
                            l for l in content if not 'Multi-Release:' in l
                        ]
                        if newContent != content:
                            with open(manifest, 'w') as fp:
                                fp.write(''.join(newContent))

                serviceRE = re.compile(r'META-INF/services/(.+)')
                for arcname in names:
                    m = serviceRE.match(arcname)
                    if m:
                        service = m.group(1)
                        # While a META-INF provider configuration file must use a fully qualified binary
                        # name[1] of the service, a provides directive in a module descriptor must use
                        # the fully qualified non-binary name[2] of the service.
                        #
                        # [1] https://docs.oracle.com/javase/9/docs/api/java/util/ServiceLoader.html
                        # [2] https://docs.oracle.com/javase/9/docs/api/java/lang/module/ModuleDescriptor.Provides.html#service--
                        service = service.replace('$', '.')
                        assert '/' not in service
                        provides.setdefault(service, set()).update(
                            zf.read(arcname).splitlines())
                        # Service types defined in the module are assumed to be used by the module
                        serviceClass = service.replace('.', '/') + '.class'
                        if serviceClass in names:
                            uses.add(service)

    servicesDir = join(moduleDir, 'META-INF', 'services')
    if exists(servicesDir):
        shutil.rmtree(servicesDir)

    jmd = JavaModuleDescriptor(moduleName, exports, requires, uses, provides,
                               packages=packages,
                               concealedRequires=concealedRequires,
                               jarpath=moduleJar, dist=dist,
                               modulepath=modulepath)

    # Compile module-info.class
    moduleInfo = join(moduleDir, 'module-info.java')
    with open(moduleInfo, 'w') as fp:
        print >> fp, jmd.as_module_info()
    javacCmd = [jdk.javac, '-d', moduleDir]
    jdkModuleNames = [m.name for m in jdkModules]
    modulepathJars = [
        m.jarpath for m in jmd.modulepath
        if m.jarpath and m.name not in jdkModuleNames
    ]
    upgrademodulepathJars = [
        m.jarpath for m in jmd.modulepath
        if m.jarpath and m.name in jdkModuleNames
    ]
    if modulepathJars:
        javacCmd.append('--module-path')
        javacCmd.append(os.pathsep.join(modulepathJars))
    if upgrademodulepathJars:
        javacCmd.append('--upgrade-module-path')
        javacCmd.append(os.pathsep.join(upgrademodulepathJars))
    if concealedRequires:
        for module, packages in concealedRequires.iteritems():
            for package in packages:
                javacCmd.append('--add-exports=' + module + '/' + package +
                                '=' + moduleName)
    javacCmd.append(moduleInfo)
    mx.run(javacCmd)

    # Create the module jar
    shutil.make_archive(moduleJar, 'zip', moduleDir)
    os.rename(moduleJar + '.zip', moduleJar)
    jmd.save()
    return jmd
def gate(args):
    """run the tests used to validate a push to the stable Maxine repository

    If this commands exits with a 0 exit code, then the source code is in a
    state that would be accepted for integration into the main repository."""
    # Partition arguments: '-nocheck' disables the style/copyright checks,
    # everything else is forwarded to the MaxineTester runs.
    check = True
    testArgs = []
    for arg in args:
        if arg == '-nocheck':
            check = False
        else:
            testArgs.append(arg)

    mx._opts.specific_suites = ["maxine"]
    if check:
        if mx.checkstyle([]):
            mx.abort('Checkstyle warnings were found')
        if exists(join(_maxine_home, '.git')):
            # Copyright check depends on the sources being in a git repo
            mx.log('Running checkcopyrights')
            if checkcopyrights(['--modified', '--report-errors']):
                mx.abort('Copyright issues were found')

    mx.log('Ensuring JavaTester harness is up to date')
    try:
        jttgen([])
    except SystemExit:
        mx.log(
            'Updated JavaTesterRunScheme.java or JavaTesterTests.java in com.sun.max.vm.jtrun.all.'
        )
        mx.log(
            'To push your changes to the repository, these files need to be generated locally and checked in.'
        )
        mx.log('The files can be generated by running: mx jttgen')
        mx.abort(1)

    mx.log('Ensuring mx/suite.py files are canonicalized')
    try:
        mx.canonicalizeprojects([])
    except SystemExit:
        mx.log(
            'Rerun "mx canonicalizeprojects" and check-in the modified mx/suite.py files.'
        )
        mx.abort(1)

    mx.log('Running MaxineTester...')
    testme([
        '-image-configs=java', '-maxvm-configs=std,forceC1X,forceT1X',
        '-jtt-image-configs=jtt-c1xc1x,jtt-t1xc1x,jtt-c1xt1x,jtt-t1xt1x',
        '-tests=c1x,junit:uk.ac+tests.unsafe+tests.vm+max.l+max.c+max.u+max.i+max.M+max.p,jsr292,output,javatester'
    ] + testArgs)
    testme([
        '-image-configs=ss', '-tests=output:Hello+Catch+GC+WeakRef+Final',
        '-fail-fast'
    ] + testArgs)
def _mxrun(args, cwd=_suite.dir, verbose=False, out=None):
    """Run *args* as a subprocess, aborting on a non-zero exit status."""
    if verbose:
        mx.log("Running '{}'".format(' '.join(args)))
    exit_status = mx.run(args, nonZeroIsFatal=False, cwd=cwd, out=out)
    if exit_status:
        mx.abort(exit_status)
def get_jdk(self):
    """Return the JVMCI JDK; abort if --jdk selects a different tag."""
    selected_tag = mx.get_jdk_option().tag
    if selected_tag and selected_tag != mx_compiler._JVMCI_JDK_TAG:
        message = "The '{0}/{1}' VM requires '--jdk={2}'".format(
            self.name(), self.config_name(), mx_compiler._JVMCI_JDK_TAG)
        mx.abort(message)
    return mx.get_jdk(tag=mx_compiler._JVMCI_JDK_TAG)
def _test_libgraal_fatal_error_handling():
    """
    Tests that fatal errors in libgraal route back to HotSpot fatal error handling.

    Runs the avrora DaCapo benchmark with libgraal configured to crash
    fatally and verifies that the expected hs_err log files are produced.
    """
    vmargs = [
        '-XX:+PrintFlagsFinal', '-Dlibgraal.CrashAt=length,hashCode',
        '-Dlibgraal.CrashAtIsFatal=true'
    ]
    cmd = ["dacapo:avrora", "--tracker=none", "--"
           ] + vmargs + ["--", "--preserve"]
    out = mx.OutputCapture()
    exitcode, bench_suite, _ = mx_benchmark.gate_mx_benchmark(
        cmd, out=out, err=out, nonZeroIsFatal=False)
    if exitcode == 0:
        if 'CrashAtIsFatal: no fatalError function pointer installed' in out.data:
            # Executing a VM that does not configure fatal errors handling
            # in libgraal to route back through the VM.
            pass
        else:
            # A forced crash was requested, so a zero exit code is a failure.
            mx.abort('Expected benchmark to result in non-zero exit code: ' +
                     ' '.join(cmd) + linesep + out.data)
    else:
        if len(bench_suite.scratchDirs()) == 0:
            mx.abort("No scratch dir found despite error being expected!")
        latest_scratch_dir = bench_suite.scratchDirs()[-1]
        seen_libjvmci_log = False
        hs_errs = glob.glob(join(latest_scratch_dir, 'hs_err_pid*.log'))
        if not hs_errs:
            mx.abort(
                'Expected a file starting with "hs_err_pid" in test directory. Entries found='
                + str(listdir(latest_scratch_dir)))
        for hs_err in hs_errs:
            mx.log("Verifying content of {}".format(
                join(latest_scratch_dir, hs_err)))
            with open(join(latest_scratch_dir, hs_err)) as fp:
                contents = fp.read()
            # libjvmci logs and plain HotSpot logs carry different markers.
            if 'libjvmci' in hs_err:
                seen_libjvmci_log = True
                if 'Fatal error: Forced crash' not in contents:
                    mx.abort(
                        'Expected "Fatal error: Forced crash" to be in contents of '
                        + hs_err + ':' + linesep + contents)
            else:
                if 'Fatal error in JVMCI' not in contents:
                    mx.abort(
                        'Expected "Fatal error in JVMCI" to be in contents of '
                        + hs_err + ':' + linesep + contents)
        if 'JVMCINativeLibraryErrorFile' in out.data and not seen_libjvmci_log:
            mx.abort(
                'Expected a file matching "hs_err_pid*_libjvmci.log" in test directory. Entries found='
                + str(listdir(latest_scratch_dir)))

    # Only clean up scratch dir on success
    for scratch_dir in bench_suite.scratchDirs():
        mx.log("Cleaning up scratch dir after gate task completion: {}".format(
            scratch_dir))
        mx.rmtree(scratch_dir)
def maven_plugin_install(args):
    """Deploy the native-image-maven-plugin (and optionally all suite
    artifacts) to a Maven repository, then print a usage snippet.

    Fixes: corrected the error-message typo "no a binary suite" -> "not a
    binary suite" and dropped the misleading 'raise' in front of mx.abort
    (mx.abort raises itself and never returns).
    """
    parser = ArgumentParser(prog='mx maven-plugin-install')
    parser.add_argument(
        "--deploy-dependencies",
        action='store_true',
        help=
        "This will deploy all the artifacts from all suites before building and deploying the plugin"
    )
    parser.add_argument(
        '--licenses',
        help=
        'Comma-separated list of licenses that are cleared for upload. Only used if no url is given. Otherwise licenses are looked up in suite.py'
    )
    parser.add_argument('--gpg',
                        action='store_true',
                        help='Sign files with gpg before deploying')
    parser.add_argument(
        '--gpg-keyid',
        help='GPG keyid to use when signing files (implies --gpg)',
        default=None)
    parser.add_argument(
        'repository_id',
        metavar='repository-id',
        nargs='?',
        action='store',
        help=
        'Repository ID used for binary deploy. If none is given, mavens local repository is used instead.'
    )
    parser.add_argument(
        'url',
        metavar='repository-url',
        nargs='?',
        action='store',
        help=
        'Repository URL used for binary deploy. If no url is given, the repository-id is looked up in suite.py'
    )
    parsed = parser.parse_args(args)

    if not suite.isSourceSuite():
        mx.abort(
            "maven-plugin-install requires {} to be a source suite, not a binary suite"
            .format(suite.name))

    # Resolve the target repository from the positional arguments.
    if parsed.url:
        if parsed.licenses:
            licenses = mx.get_license(parsed.licenses.split(','))
        elif parsed.repository_id:
            licenses = mx.repository(parsed.repository_id).licenses
        else:
            licenses = []
        repo = mx.Repository(suite, parsed.repository_id, parsed.url,
                             parsed.url, licenses)
    elif parsed.repository_id:
        repo = mx.repository(parsed.repository_id)
    else:
        repo = mx.maven_local_repository()

    svm_version = suite.release_version(snapshotSuffix='SNAPSHOT')

    if parsed.deploy_dependencies:
        deploy_args = [
            '--suppress-javadoc',
            '--all-distribution-types',
            '--validate=full',
            '--all-suites',
        ]
        if parsed.licenses:
            deploy_args += ["--licenses", parsed.licenses]
        if parsed.gpg:
            deploy_args += ["--gpg"]
        if parsed.gpg_keyid:
            deploy_args += ["--gpg-keyid", parsed.gpg_keyid]
        if parsed.repository_id:
            deploy_args += [parsed.repository_id]
            if parsed.url:
                deploy_args += [parsed.url]
        mx.maven_deploy(deploy_args)

    deploy_native_image_maven_plugin(svm_version, repo, parsed.gpg,
                                     parsed.gpg_keyid)

    success_message = [
        '',
        'Use the following plugin snippet to enable native-image building for your maven project:',
        '',
        '<plugin>',
        '    <groupId>com.oracle.substratevm</groupId>',
        '    <artifactId>native-image-maven-plugin</artifactId>',
        '    <version>' + svm_version + '</version>',
        '    <executions>',
        '        <execution>',
        '            <goals>',
        '                <goal>native-image</goal>',
        '            </goals>',
        '            <phase>package</phase>',
        '        </execution>',
        '    </executions>',
        '</plugin>',
        '',
    ]
    mx.log('\n'.join(success_message))
def truffle_language_ensure(language_flag, version=None, native_image_root=None, early_exit=False, extract=True):
    """
    Ensures that we have a valid suite for the given language_flag, by downloading a binary if necessary
    and providing the suite distribution artifacts in the native-image directory hierarchy (via symlinks).
    :param language_flag: native-image language_flag whose truffle-language we want to use
    :param version: if not specified and no TRUFFLE_<LANG>_VERSION set latest binary deployed master revision gets downloaded
    :param native_image_root: the native_image_root directory where the artifacts get installed to
    :return: language suite for the given language_flag
    """
    if not native_image_root:
        native_image_root = suite_native_image_root()

    version_env_var = 'TRUFFLE_' + language_flag.upper() + '_VERSION'
    # Membership test via `in` replaces the deprecated Python-2-only dict.has_key.
    if not version and version_env_var in os.environ:
        version = os.environ[version_env_var]

    if language_flag not in flag_suitename_map:
        mx.abort('No truffle-language uses language_flag \'' + language_flag + '\'')

    language_dir = join('languages', language_flag)
    if early_exit and exists(join(native_image_root, language_dir)):
        mx.logv('Early exit mode: Language subdir \'' + language_flag + '\' exists. Skip suite.import_suite.')
        return None

    language_entry = flag_suitename_map[language_flag]

    language_suite_name = language_entry[0]
    # A fourth tuple entry, when present, names the repository containing the suite.
    language_repo_name = language_entry[3] if len(language_entry) > 3 else None

    urlinfos = [
        mx.SuiteImportURLInfo(
            mx_urlrewrites.rewriteurl('https://curio.ssw.jku.at/nexus/content/repositories/snapshots'),
            'binary',
            mx.vc_system('binary'))
    ]

    failure_warning = None
    if not version and not mx.suite(language_suite_name, fatalIfMissing=False):
        # If no specific version requested use binary import of last recently deployed master version
        repo_suite_name = language_repo_name if language_repo_name else language_suite_name
        repo_url = mx_urlrewrites.rewriteurl('https://github.com/graalvm/{0}.git'.format(repo_suite_name))
        version = mx.SuiteImport.resolve_git_branchref(repo_url, 'binary', abortOnError=False)
        if not version:
            failure_warning = 'Resolving \'binary\' against ' + repo_url + ' failed'

    language_suite = suite.import_suite(language_suite_name, version=version, urlinfos=urlinfos, kind=None, in_subdir=bool(language_repo_name))

    if not language_suite:
        if failure_warning:
            mx.warn(failure_warning)
        mx.abort('Binary suite not found and no local copy of ' + language_suite_name + ' available.')

    if not extract:
        if not exists(join(native_image_root, language_dir)):
            mx.abort('Language subdir \'' + language_flag + '\' should already exist with extract=False')
        return language_suite

    # Install (symlink) the language's jar distributions/libraries into the native-image tree.
    language_suite_depnames = language_entry[1]
    language_deps = language_suite.dists + language_suite.libs
    language_deps = [dep for dep in language_deps if dep.name in language_suite_depnames]
    native_image_layout(language_deps, language_dir, native_image_root)

    # Native distributions get extracted (not just linked) into the language dir.
    language_suite_nativedistnames = language_entry[2]
    language_nativedists = [dist for dist in language_suite.dists if dist.name in language_suite_nativedistnames]
    native_image_extract(language_nativedists, language_dir, native_image_root)

    # Prefer a suite-provided native-image.properties; otherwise fall back to the bundled one.
    option_properties = join(language_suite.mxDir, 'native-image.properties')
    target_path = remove_existing_symlink(join(native_image_root, language_dir, 'native-image.properties'))
    if exists(option_properties):
        if not exists(target_path):
            mx.logv('Add symlink to ' + str(option_properties))
            symlink_or_copy(option_properties, target_path)
    else:
        native_image_option_properties('languages', language_flag, native_image_root)
    return language_suite
def make_java_module(dist, jdk):
    """
    Creates a Java module from a distribution.

    :param JARDistribution dist: the distribution from which to create a module
    :param JDKConfig jdk: a JDK with a version >= 9 that can be used to compile the module-info class
    :return: the `JavaModuleDescriptor` for the created Java module, or None if `dist` defines no module
    """
    info = get_java_module_info(dist)
    if info is None:
        return None

    moduleName, moduleDir, moduleJar = info  # pylint: disable=unpacking-non-sequence
    mx.log('Building Java module ' + moduleName + ' from ' + dist.name)
    # Accumulators for the module-info contents built up below.
    exports = {}
    requires = {}
    concealedRequires = {}
    uses = set()
    modulepath = list()
    usedModules = set()

    if dist.suite.getMxCompatibility().moduleDepsEqualDistDeps():
        moduledeps = dist.archived_deps()
        # Each JAR distribution dependency must itself be a module; build it recursively.
        for dep in mx.classpath_entries(dist, includeSelf=False):
            jmd = make_java_module(dep, jdk) if dep.isJARDistribution() else None
            if jmd:
                modulepath.append(jmd)
                requires[jmd.name] = set([jdk.get_transitive_requires_keyword()])
            elif (dep.isJdkLibrary() or dep.isJreLibrary()) and dep.is_provided_by(jdk):
                # Provided by the JDK itself; nothing to add to the module path.
                pass
            else:
                mx.abort(dist.name + ' cannot depend on ' + dep.name + ' as it does not define a module')
    else:
        moduledeps = get_module_deps(dist)

    # Append JDK modules to module path
    jdkModules = jdk.get_modules()
    if not isinstance(jdkModules, list):
        jdkModules = list(jdkModules)
    allmodules = modulepath + jdkModules

    javaprojects = [d for d in moduledeps if d.isJavaProject()]

    # Collect packages in the module first
    packages = set()
    for dep in javaprojects:
        packages.update(dep.defined_java_packages())

    for dep in javaprojects:
        uses.update(getattr(dep, 'uses', []))
        for pkg in itertools.chain(dep.imported_java_packages(projectDepsOnly=False), getattr(dep, 'imports', [])):
            # Only consider packages not defined by the module we're creating. This handles the
            # case where we're creating a module that will upgrade an existing upgradeable
            # module in the JDK such as jdk.internal.vm.compiler.
            if pkg not in packages:
                depModule, visibility = lookup_package(allmodules, pkg, moduleName)
                if depModule and depModule.name != moduleName:
                    requires.setdefault(depModule.name, set())
                    if visibility == 'exported':
                        # A distribution based module does not re-export its imported JDK packages
                        usedModules.add(depModule)
                    else:
                        assert visibility == 'concealed'
                        concealedRequires.setdefault(depModule.name, set()).add(pkg)
                        usedModules.add(depModule)

        # If an "exports" attribute is not present, all packages are exported
        for package in _expand_package_info(dep, getattr(dep, 'exports', dep.defined_java_packages())):
            exports.setdefault(package, [])

    provides = {}
    if exists(moduleDir):
        shutil.rmtree(moduleDir)
    for d in [dist] + [md for md in moduledeps if md.isJARDistribution()]:
        if d.isJARDistribution():
            with zipfile.ZipFile(d.path, 'r') as zf:
                # To compile module-info.java, all classes it references must either be given
                # as Java source files or already exist as class files in the output directory.
                # As such, the jar file for each constituent distribution must be unpacked
                # in the output directory.
                zf.extractall(path=moduleDir)
                names = frozenset(zf.namelist())
                for arcname in names:
                    # Translate each META-INF/services/<service> entry into a `provides` clause.
                    if arcname.startswith('META-INF/services/') and not arcname == 'META-INF/services/':
                        service = arcname[len('META-INF/services/'):]
                        assert '/' not in service
                        provides.setdefault(service, set()).update(zf.read(arcname).splitlines())
                        # Service types defined in the module are assumed to be used by the module
                        serviceClass = service.replace('.', '/') + '.class'
                        if serviceClass in names:
                            uses.add(service)
    jmd = JavaModuleDescriptor(moduleName, exports, requires, uses, provides, packages=packages,
                               concealedRequires=concealedRequires, jarpath=moduleJar, dist=dist, modulepath=modulepath)

    # Compile module-info.class
    moduleInfo = join(moduleDir, 'module-info.java')
    with open(moduleInfo, 'w') as fp:
        # Python 2 print-redirection; writes the generated module-info.java source.
        print >> fp, jmd.as_module_info()
    javacCmd = [jdk.javac, '-d', moduleDir]
    jdkModuleNames = [m.name for m in jdkModules]
    # JARs whose modules are not part of the JDK go on --module-path; JARs that
    # replace JDK modules go on --upgrade-module-path instead.
    modulepathJars = [m.jarpath for m in jmd.modulepath if m.jarpath and m.name not in jdkModuleNames]
    upgrademodulepathJars = [m.jarpath for m in jmd.modulepath if m.jarpath and m.name in jdkModuleNames]
    if modulepathJars:
        javacCmd.append('--module-path')
        javacCmd.append(os.pathsep.join(modulepathJars))
    if upgrademodulepathJars:
        javacCmd.append('--upgrade-module-path')
        javacCmd.append(os.pathsep.join(upgrademodulepathJars))
    javacCmd.append(moduleInfo)
    mx.run(javacCmd)

    # Create the module jar
    shutil.make_archive(moduleJar, 'zip', moduleDir)
    # make_archive always appends '.zip'; rename to the final jar path.
    os.rename(moduleJar + '.zip', moduleJar)
    jmd.save()
    return jmd
def llvm_tool(args=None, out=None):
    """Run a bundled LLVM program with the given arguments.

    :param args: list whose first element names the LLVM tool, remainder are its arguments
    :param out: optional output redirection forwarded to mx.run
    """
    # `not args` covers both the args=None default and an empty list; the original
    # `len(args) < 1` raised TypeError when called with no arguments instead of
    # printing the usage message.
    if not args:
        mx.abort("usage: mx llvm-tool <llvm-tool> [args...]")
    llvm_program = findBundledLLVMProgram(args[0])
    mx.run([llvm_program] + args[1:], out=out)
def register_known_vm(name):
    """Add *name* to the set of known VMs, aborting on a duplicate registration."""
    is_duplicate = name in _known_vms
    if not is_duplicate:
        _known_vms.add(name)
        return
    raise mx.abort("VM '{}' already registered".format(name))