def _unittest_config_participant_tck(config):
    """Unittest config participant that adds TCK language providers and languages
    to the classpath of the test VM.

    :param config: a ``(vmArgs, mainClass, mainClassArgs)`` tuple.
    :return: the same tuple with ``vmArgs`` extended by the collected classpath
        entries and ``-D`` system properties.
    """
    def create_filter(requiredResource):
        # Returns a predicate that is True for jars containing requiredResource.
        def has_resource(jar):
            with zipfile.ZipFile(jar, "r") as zf:
                try:
                    zf.getinfo(requiredResource)
                except KeyError:
                    return False
                else:
                    return True
        return has_resource

    def import_visitor(suite, suite_import, predicate, collector, javaProperties, seenSuites, **extra_args):
        suite_collector(mx.suite(suite_import.name), predicate, collector, javaProperties, seenSuites)

    def suite_collector(suite, predicate, collector, javaProperties, seenSuites):
        # Walks the suite-import graph once (seenSuites prevents revisits) and
        # collects classpath entries of JAR distributions matching the predicate.
        if suite.name in seenSuites:
            return
        seenSuites.add(suite.name)
        suite.visit_imports(import_visitor, predicate=predicate, collector=collector, javaProperties=javaProperties, seenSuites=seenSuites)
        for dist in suite.dists:
            if dist.isJARDistribution() and exists(dist.path) and predicate(dist.path):
                for distCpEntry in mx.classpath_entries(dist):
                    if hasattr(distCpEntry, "getJavaProperties"):
                        # Fix: the guard checks distCpEntry, so query distCpEntry
                        # (the original called dist.getJavaProperties() here,
                        # dropping per-entry properties; cf. _collect_class_path_entries).
                        for key, value in distCpEntry.getJavaProperties().items():
                            javaProperties[key] = value
                    if distCpEntry.isJdkLibrary() or distCpEntry.isJreLibrary():
                        cpPath = distCpEntry.classpath_repr(mx.get_jdk(), resolve=True)
                    else:
                        cpPath = distCpEntry.classpath_repr(resolve=True)
                    if cpPath:
                        collector[cpPath] = None

    javaPropertiesToAdd = OrderedDict()
    providers = OrderedDict()
    suite_collector(mx.primary_suite(), create_filter("META-INF/services/org.graalvm.polyglot.tck.LanguageProvider"), providers, javaPropertiesToAdd, set())
    languages = OrderedDict()
    suite_collector(mx.primary_suite(), create_filter("META-INF/truffle/language"), languages, javaPropertiesToAdd, set())
    vmArgs, mainClass, mainClassArgs = config
    cpIndex, cpValue = mx.find_classpath_arg(vmArgs)
    # OrderedDict used as an ordered set to de-duplicate classpath elements.
    cpBuilder = OrderedDict()
    if cpValue:
        for cpElement in cpValue.split(os.pathsep):
            cpBuilder[cpElement] = None
    for langCpElement in languages:
        cpBuilder[langCpElement] = None
    for providerCpElement in providers:
        cpBuilder[providerCpElement] = None
    cpValue = os.pathsep.join((e for e in cpBuilder))
    if cpIndex:
        vmArgs[cpIndex] = cpValue
    else:
        vmArgs.append("-cp")
        vmArgs.append(cpValue)
    for key, value in javaPropertiesToAdd.items():
        vmArgs.append("-D" + key + "=" + value)
    return (vmArgs, mainClass, mainClassArgs)
def jackpot(args, suite=None, nonZeroIsFatal=False):
    """run Jackpot 3.0 against non-test Java projects"""
    # Prefer a user-provided Jackpot installation; otherwise resolve the
    # mx-managed JACKPOT library.
    jackpotHome = mx.get_env('JACKPOT_HOME', None)
    if jackpotHome:
        jackpotJar = join(jackpotHome, 'jackpot.jar')
    else:
        jackpotJar = mx.library('JACKPOT').get_path(resolve=True)
    assert exists(jackpotJar)
    if suite is None:
        suite = mx.primary_suite()
    nonTestProjects = [p for p in mx.projects() if _should_test_project(p)]
    if not nonTestProjects:
        return 0
    groups = []
    for p in nonTestProjects:
        javacClasspath = []
        deps = []
        # Collect library and Java-project dependencies of p.
        p.walk_deps(visit=lambda dep, edge: deps.append(dep) if dep.isLibrary() or dep.isJavaProject() else None)
        # Annotation processors that are not also regular dependencies are
        # tracked separately so they can be excluded from the javac classpath.
        annotationProcessorOnlyDeps = []
        if len(p.annotation_processors()) > 0:
            for apDep in p.annotation_processors():
                if not apDep in deps:
                    deps.append(apDep)
                    annotationProcessorOnlyDeps.append(apDep)
        for dep in deps:
            if dep == p:
                continue
            if dep in annotationProcessorOnlyDeps:
                continue
            javacClasspath.append(dep.classpath_repr(resolve=True))
        # Cap the source level at 9 — presumably the maximum this Jackpot
        # release understands (TODO confirm against the Jackpot 3.0 docs).
        sourceLevel = min(p.javaCompliance.value, 9)
        # One '--group' entry per project: classpath, source level, source dirs.
        groups = groups + ['--group', "--classpath " + mx._separatedCygpathU2W(_escape_string(os.pathsep.join(javacClasspath))) + " --source " + str(sourceLevel) + " " + " ".join([_escape_string(d) for d in p.source_dirs()])]
    cmd = ['-classpath', mx._cygpathU2W(jackpotJar), 'org.netbeans.modules.jackpot30.cmdline.Main']
    cmd = cmd + ['--fail-on-warnings', '--progress'] + args + groups
    # This Jackpot version requires exactly JDK 8.
    jdk = mx.get_jdk(mx.JavaCompliance("8"), cancel='cannot run Jackpot', purpose="run Jackpot")
    if jdk is None:
        mx.warn('Skipping Jackpot since JDK 8 is not available')
        return 0
    else:
        return mx.run_java(cmd, nonZeroIsFatal=nonZeroIsFatal, jdk=jdk)
def with_branch_and_commit_dict(self, d):
    """
    We run our benchmark from the graalpython directories, but with other
    suites as primary suites in the CI, so we potentially want to update
    branch and commit info.
    """
    running_from_elsewhere = mx.primary_suite().dir != os.getcwd()
    if running_from_elsewhere:
        looks_like_graalpython = any(
            os.path.isdir(entry) and entry.startswith("mx.graalpython")
            for entry in os.listdir()
        )
        if looks_like_graalpython:
            vc = SUITE.vc
            if vc is None:
                return d
            suite_dir = SUITE.dir
            info = vc.parent_info(suite_dir)
            d.update({
                "branch": vc.active_branch(suite_dir, abortOnError=False) or "<unknown>",
                "commit.rev": vc.parent(suite_dir),
                "commit.repo-url": vc.default_pull(suite_dir, abortOnError=False) or "unknown",
                "commit.author": info["author"],
                "commit.author-ts": info["author-ts"],
                "commit.committer": info["committer"],
                "commit.committer-ts": info["committer-ts"],
            })
    return d
def verify_jvmci_ci_versions(args=None, extraVMarguments=None):
    """Check that the JVMCI version referenced in the ci*.hocon files matches
    the one referenced in .travis.yml, aborting on any mismatch."""
    # Matches 'jvmci-<major>.<minor>' on non-comment lines.
    version_pattern = re.compile(r'^(?!\s*#).*jvmci-(?P<version>\d*\.\d*)')

    def _grep_version(files, msg):
        # Scans all files for the version pattern; aborts if two different
        # versions are found, returns the (single) version otherwise.
        version = None
        last = None
        # NOTE(review): linenr is never reset per file, so line numbers reported
        # for the second and later files are cumulative — confirm if intended.
        linenr = 0
        for filename in files:
            # NOTE(review): file handles are never closed (relies on GC).
            for line in open(filename):
                m = version_pattern.search(line)
                if m:
                    new_version = m.group('version')
                    if version and version != new_version:
                        mx.abort(
                            os.linesep.join([
                                "Multiple JVMCI versions found in {0} files:".format(msg),
                                " {0} in {1}:{2}: {3}".format(version, *last),
                                " {0} in {1}:{2}: {3}".format(new_version, filename, linenr, line),
                            ]))
                    last = (filename, linenr, line.rstrip())
                    version = new_version
                linenr = linenr + 1
        if not version:
            mx.abort("No JVMCI version found in {0} files!".format(msg))
        return version

    hocon_version = _grep_version(glob.glob(join(mx.primary_suite().dir, 'ci*.hocon')) + glob.glob(join(mx.primary_suite().dir, 'ci*/*.hocon')), 'ci.hocon')
    travis_version = _grep_version(glob.glob('.travis.yml'), 'TravisCI')
    if hocon_version != travis_version:
        mx.abort("Travis and ci.hocon JVMCI versions do not match: {0} vs. {1}".format(travis_version, hocon_version))
    mx.log('JVMCI versions are ok!')
def testdownstream_cli(args):
    """tests a downstream repo against the current working directory state of the primary suite

    Multiple repos can be specified with multiple instances of the -R/--repo option. The
    first specified repo is the one being tested. Further repos can be specified to either
    override where suites are cloned from or to satisfy --dynamicimports.
    """
    parser = ArgumentParser(prog='mx testdownstream')
    parser.add_argument('-R', '--repo', dest='repos', action='append', help='URL of downstream repo to clone. First specified repo is the primary repo being tested', required=True, metavar='<url>', default=[])
    parser.add_argument('--suitedir', action='store', help='relative directory of suite to test in primary repo (default: . )', default='.', metavar='<path>')
    parser.add_argument('--downstream-branch', action='store', help='name of branch to look for in downstream repo(s). '
                        'Can be specified by DOWNSTREAM_BRANCH environment variable. If not specified, current branch of the primary suite is used.', metavar='<name>')
    parser.add_argument('-C', '--mx-command', dest='mxCommands', action='append', help='arguments to an mx command run in primary repo suite (e.g., -C "-v --strict-compliance gate")', default=[], metavar='<args>')
    parser.add_argument('-E', '--encoded-space', help='character used to encode a space in an mx command argument. Each instance of this character in an argument will be replaced with a space.', metavar='<char>')
    args = parser.parse_args(args)

    # Split each -C value into argv form, decoding the space placeholder if one
    # was configured via -E.
    mxCommands = []
    for command in [e.split() for e in args.mxCommands]:
        if args.encoded_space:
            command = [arg.replace(args.encoded_space, ' ') for arg in command]
        mxCommands.append(command)

    # CLI flag wins over the DOWNSTREAM_BRANCH environment variable.
    branch = args.downstream_branch or mx.get_env('DOWNSTREAM_BRANCH', None)
    return testdownstream(mx.primary_suite(), args.repos, args.suitedir, mxCommands, branch)
def _collect_class_path_entries(cp_entries_filter, entries_collector, properties_collector):
    """Collect classpath entries and Java system properties from all
    distributions (transitively over suite imports) accepted by
    ``cp_entries_filter``.

    :param cp_entries_filter: predicate on a distribution deciding inclusion.
    :param entries_collector: ordered-dict used as an ordered set of classpath paths.
    :param properties_collector: dict receiving Java properties from entries.
    """
    def import_visitor(suite, suite_import, predicate, collector, javaProperties, seenSuites, **extra_args):
        suite_collector(mx.suite(suite_import.name), predicate, collector, javaProperties, seenSuites)

    def suite_collector(suite, predicate, collector, javaProperties, seenSuites):
        # seenSuites guards against revisiting suites in the import graph.
        if suite.name in seenSuites:
            return
        seenSuites.add(suite.name)
        suite.visit_imports(import_visitor, predicate=predicate, collector=collector, javaProperties=javaProperties, seenSuites=seenSuites)
        for dist in suite.dists:
            if predicate(dist):
                for distCpEntry in mx.classpath_entries(dist):
                    if hasattr(distCpEntry, "getJavaProperties"):
                        for key, value in distCpEntry.getJavaProperties().items():
                            javaProperties[key] = value
                    # JDK/JRE libraries need the JDK to resolve their path.
                    if distCpEntry.isJdkLibrary() or distCpEntry.isJreLibrary():
                        cpPath = distCpEntry.classpath_repr(mx.get_jdk(), resolve=True)
                    else:
                        cpPath = distCpEntry.classpath_repr(resolve=True)
                    if cpPath:
                        collector[cpPath] = None

    suite_collector(mx.primary_suite(), cp_entries_filter, entries_collector, properties_collector, set())
def get_suite(name):
    """Return the mx suite called *name*, importing it into the primary suite
    when it is not already loaded.

    A leading '/' in *name* marks a suite living in a subdirectory of its
    repository; it is stripped to obtain the actual suite name.
    """
    bare_name = name.lstrip('/')
    result = mx.suite(bare_name, fatalIfMissing=False)
    if not result:
        in_subdir = name.startswith('/')
        result = mx.primary_suite().import_suite(bare_name, version=None, urlinfos=None, in_subdir=in_subdir)
    assert result
    return result
def update_import(name, rev="origin/master", callback=None):
    """Update the imported suite *name* to *rev* and record its new tip in the
    primary suite's suite.py.

    :param str name: name of the imported suite to update.
    :param str rev: revision to update to (default: origin/master).
    :param callback: optional no-arg callable invoked after a successful update.
    """
    primary = mx.primary_suite()
    dep_dir = mx.suite(name).vc_dir
    vc = mx.VC.get_vc(dep_dir)
    vc.pull(dep_dir, update=False)
    vc.update(dep_dir, rev=rev)
    tip = str(vc.tip(dep_dir)).strip()
    suitefile = os.path.join(primary.dir, "mx." + primary.name, "suite.py")
    with open(suitefile, 'r') as f:
        contents = f.read()
    # Fix: use a raw string — '\s' in a plain literal is an invalid escape
    # (SyntaxWarning since Python 3.12); also re.escape the suite name so
    # regex metacharacters in it cannot corrupt the pattern.
    dep_re = re.compile(
        r"['\"]name['\"]:\s+['\"]%s['\"],\s+['\"]version['\"]:\s+['\"]([a-z0-9]+)['\"]" % re.escape(name),
        re.MULTILINE)
    dep_match = dep_re.search(contents)
    if dep_match:
        start = dep_match.start(1)
        end = dep_match.end(1)
        # The recorded hash and the new tip must have equal length for the
        # in-place splice below to be valid.
        assert end - start == len(tip)
        mx.update_file(suitefile, "".join([contents[:start], tip, contents[end:]]), showDiff=True)
        if callback:
            callback()
    else:
        mx.abort("%s not found in %s" % (name, suitefile))
def jackpot(args, suite=None, nonZeroIsFatal=False):
    """run Jackpot 11.1 against non-test Java projects"""
    # Prefer a user-provided Jackpot installation; otherwise resolve the
    # mx-managed JACKPOT library.
    jackpotHome = mx.get_env('JACKPOT_HOME', None)
    if jackpotHome:
        jackpotJar = join(jackpotHome, 'jackpot.jar')
    else:
        jackpotJar = mx.library('JACKPOT').get_path(resolve=True)
    assert exists(jackpotJar)
    if suite is None:
        suite = mx.primary_suite()
    nonTestProjects = [p for p in mx.projects() if _should_test_project(p)]
    if not nonTestProjects:
        return 0
    groups = []
    for p in nonTestProjects:
        javacClasspath = []
        deps = []
        # Collect library and Java-project dependencies of p.
        p.walk_deps(visit=lambda dep, edge: deps.append(dep) if dep.isLibrary() or dep.isJavaProject() else None)
        # Annotation processors that are not also regular dependencies are
        # tracked so they can be excluded from the javac classpath below.
        annotationProcessorOnlyDeps = []
        if len(p.annotation_processors()) > 0:
            for apDep in p.annotation_processors():
                if not apDep in deps:
                    deps.append(apDep)
                    annotationProcessorOnlyDeps.append(apDep)
        for dep in deps:
            if dep == p:
                continue
            if dep in annotationProcessorOnlyDeps:
                continue
            javacClasspath.append(dep.classpath_repr(resolve=True))
        # Cap the source level at 9 — presumably the maximum this Jackpot
        # release understands (TODO confirm against the Jackpot docs).
        sourceLevel = min(p.javaCompliance.value, 9)
        groups = groups + ['--group', "--classpath " + mx._separatedCygpathU2W(_escape_string(os.pathsep.join(javacClasspath))) + " --source " + str(sourceLevel) + " " + " ".join([_escape_string(d) for d in p.source_dirs()])]
    # JVM flags needed for Jackpot to access javac internals on JDK 11+.
    cmd = ['--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED', '--add-opens=java.base/java.net=ALL-UNNAMED', '--add-opens=java.desktop/sun.awt=ALL-UNNAMED']
    cmd = cmd + ['-classpath', mx._cygpathU2W(jackpotJar), 'org.netbeans.modules.jackpot30.cmdline.Main']
    jackCmd = ['--fail-on-warnings', '--progress'] + args + groups
    jdk = mx.get_jdk(mx.JavaCompliance("11+"), cancel='cannot run Jackpot', purpose="run Jackpot")
    if jdk is None:
        mx.warn('Skipping Jackpot since JDK 11+ is not available')
        return 0
    else:
        # Jackpot arguments are passed via an @argfile to avoid command-line
        # length limits; the temp file lives only for the duration of the run.
        with tempfile.NamedTemporaryFile(mode='w', suffix='.jackpot') as f:
            for c in jackCmd:
                print(c, file=f)
            f.flush()
            ret = mx.run_java(cmd + ['@' + f.name], nonZeroIsFatal=nonZeroIsFatal, jdk=jdk)
            if ret != 0:
                mx.warn('To simulate the failure execute `mx -p {0} jackpot`.'.format(suite.dir))
                mx.warn('To fix the error automatically try `mx -p {0} jackpot --apply`'.format(suite.dir))
            return ret
def _default_compdb_path():
    """Choose the default location for compile_commands.json.

    Returns None when there is no primary suite (compdb stays disabled);
    otherwise a path next to the suite's repository root, falling back to the
    suite directory for suites without a vc_dir.
    """
    suite = mx.primary_suite()
    if suite is None:
        # no primary suite, don't try to enable compdb
        return None
    base_dir = os.path.dirname(suite.vc_dir) if suite.vc_dir else suite.dir
    return os.path.join(base_dir, 'compile_commands.json')
def bisect_benchmark(argv):
    """Drive a benchmark bisection; on any failure, e-mail the committer of the
    current HEAD a link to the CI build before re-raising."""
    suite = mx.primary_suite()

    def git_out(arguments):
        return suite.vc.git_command(suite.vc_dir, arguments).strip()

    current_branch = git_out(['rev-parse', '--abbrev-ref', 'HEAD'])
    head_subject = git_out(['log', '--format=%s', '-n', '1'])
    committer_email = git_out(['log', '--format=%cE', '-n', '1'])
    bisect_id = f'{current_branch}: {head_subject}'
    try:
        _bisect_benchmark(argv, bisect_id, committer_email)
    except Exception:
        build_url = os.environ.get('BUILD_URL', 'Unknown URL')
        send_email(bisect_id, committer_email, "Job failed.\n {}".format(build_url))
        raise
def renamegraalpackages(args):
    """ rename Graal packages to match names in OpenJDK"""
    parser = ArgumentParser(prog='mx renamegraalpackages')
    args = parser.parse_args(args)
    # Sub-packages that exist for each renamed package (empty string = the
    # package itself).
    package_suffixes = {
        'org.graalvm.collections': ['', '.test'],
        'org.graalvm.word': [''],
        'org.graalvm.libgraal': ['', '.jdk8', '.jdk11', '.jdk13']
    }
    vc_dir = mx.primary_suite().vc_dir
    # rename packages: visit every suite directory in the repository (detected
    # by the presence of mx.<name>/suite.py).
    for proj_dir in [join(vc_dir, x) for x in os.listdir(vc_dir) if exists(join(vc_dir, x, 'mx.' + x, 'suite.py'))]:
        for dirpath, _, filenames in os.walk(proj_dir):
            for filename in filenames:
                if filename.endswith('.java') or filename == 'suite.py' or filename == 'generate_unicode_properties.py':
                    # rename_packages rewrites package references in place
                    # (defined elsewhere; uses the global package_renamings).
                    rename_packages(join(dirpath, filename))
        # move directories according to new package name
        for old_name, new_name in package_renamings.items():
            for sfx in package_suffixes[old_name]:
                old_dir = join(proj_dir, 'src', old_name + sfx, 'src', old_name.replace('.', os.sep))
                if exists(old_dir):
                    new_name_sfx = new_name + sfx
                    # A stale target from a previous run is replaced wholesale.
                    if exists(join(proj_dir, 'src', new_name_sfx)):
                        shutil.rmtree(join(proj_dir, 'src', new_name_sfx))
                    new_dir = join(proj_dir, 'src', new_name_sfx, 'src', new_name.replace('.', os.sep))
                    os.makedirs(new_dir)
                    for f in os.listdir(old_dir):
                        shutil.move(os.path.join(old_dir, f), new_dir)
                    shutil.rmtree(join(proj_dir, 'src', old_name + sfx))
    # rename in additional place: native entry-point symbols in the libgraal
    # entry points file must match the new package name.
    package = 'com.oracle.svm.graal.hotspot.libgraal'
    filepath = join(vc_dir, 'substratevm', 'src', package, 'src', package.replace('.', os.sep), 'LibGraalEntryPoints.java')
    with open(filepath) as fp:
        contents = fp.read()
    new_contents = contents
    old_name = 'Java_org_graalvm_libgraal'
    new_name = 'Java_jdk_internal_vm_compiler_libgraal'
    new_contents = new_contents.replace(old_name, new_name)
    # Only rewrite the file when something actually changed.
    if new_contents != contents:
        with open(filepath, 'w') as fp:
            fp.write(new_contents)
def verify_jvmci_ci_versions(args):
    """
    Checks that the jvmci versions used in various ci files agree.

    If the ci.hocon files use a -dev version, it allows the travis ones to use the previous version.
    For example, if ci.hocon uses jvmci-0.24-dev, travis may use either jvmci-0.24-dev or jvmci-0.23
    """
    # Matches 'jvmci-<major>.<minor>' with an optional '-dev' suffix on
    # non-comment lines.
    version_pattern = re.compile(r'^(?!\s*#).*jvmci-(?P<version>\d*\.\d*)(?P<dev>-dev)?')

    def _grep_version(files, msg):
        # Returns (version, dev_flag); aborts if two different versions (or
        # dev flags) are found across the files.
        version = None
        dev = None
        last = None
        # NOTE(review): linenr is never reset per file, so reported line
        # numbers for later files are cumulative — confirm if intended.
        linenr = 0
        for filename in files:
            # NOTE(review): file handles are never closed (relies on GC).
            for line in open(filename):
                m = version_pattern.search(line)
                if m:
                    new_version = m.group('version')
                    new_dev = bool(m.group('dev'))
                    if (version and version != new_version) or (dev is not None and dev != new_dev):
                        mx.abort(
                            os.linesep.join([
                                "Multiple JVMCI versions found in {0} files:".format(msg),
                                " {0} in {1}:{2}: {3}".format(version + ('-dev' if dev else ''), *last),
                                " {0} in {1}:{2}: {3}".format(new_version + ('-dev' if new_dev else ''), filename, linenr, line),
                            ]))
                    last = (filename, linenr, line.rstrip())
                    version = new_version
                    dev = new_dev
                linenr += 1
        if not version:
            mx.abort("No JVMCI version found in {0} files!".format(msg))
        return version, dev

    primary_suite = mx.primary_suite()
    hocon_version, hocon_dev = _grep_version(
        glob.glob(join(primary_suite.vc_dir, '*.hocon')) + glob.glob(join(primary_suite.dir, 'ci*.hocon')) + glob.glob(join(primary_suite.dir, 'ci*/*.hocon')), 'hocon')
    travis_version, travis_dev = _grep_version([join(primary_suite.vc_dir, '.travis.yml')], 'TravisCI')
    if hocon_version != travis_version or hocon_dev != travis_dev:
        versions_ok = False
        # Tolerate travis lagging one version behind a -dev hocon version:
        # travis at X is accepted when hocon is at (X+1)-dev.
        if not travis_dev and hocon_dev:
            next_travis_version = [int(a) for a in travis_version.split('.')]
            next_travis_version[-1] += 1
            next_travis_version_str = '.'.join((str(a) for a in next_travis_version))
            if next_travis_version_str == hocon_version:
                versions_ok = True
        if not versions_ok:
            mx.abort("Travis and ci.hocon JVMCI versions do not match: {0} vs. {1}".format(travis_version + ('-dev' if travis_dev else ''), hocon_version + ('-dev' if hocon_dev else '')))
    mx.log('JVMCI versions are ok!')
def _nodeCostDump(args, extraVMarguments=None):
    """list the costs associated with each Node type"""
    # NOTE(review): Python 2 code — print statements and the StringIO module.
    import csv, StringIO
    parser = ArgumentParser(prog='mx nodecostdump')
    parser.add_argument('--regex', action='store', help="Node Name Regex", default=False, metavar='<regex>')
    parser.add_argument('--markdown', action='store_const', const=True, help="Format to Markdown table", default=False)
    args, vmargs = parser.parse_known_args(args)
    additionalPrimarySuiteClassPath = '-Dprimary.suite.cp=' + mx.primary_suite().dir
    vmargs.extend([additionalPrimarySuiteClassPath, '-XX:-UseJVMCIClassLoader', 'org.graalvm.compiler.hotspot.NodeCostDumpUtil'])
    out = mx.OutputCapture()
    regex = ""
    if args.regex:
        regex = args.regex
    # The dump utility writes ';'-separated CSV to stdout, captured in `out`.
    run_vm(vmargs + _remove_empty_entries(extraVMarguments) + [regex], out=out)
    if args.markdown:
        stringIO = StringIO.StringIO(out.data)
        reader = csv.reader(stringIO, delimiter=';', lineterminator="\n")
        firstRow = True
        maxLen = 0
        # First pass: find the widest cell to size the markdown separator row.
        for row in reader:
            for col in row:
                maxLen = max(maxLen, len(col))
        stringIO.seek(0)
        # Second pass: emit the table; the header row is followed by a
        # '|---|' separator line.
        for row in reader:
            s = '|'
            if firstRow:
                firstRow = False
                nrOfCols = len(row)
                for col in row:
                    s = s + col + "|"
                print s
                s = '|'
                for _ in range(nrOfCols):
                    s = s + ('-' * maxLen) + '|'
            else:
                for col in row:
                    s = s + col + "|"
            print s
    else:
        print out.data
def _find_classes_by_annotated_methods(annotations, dists, jdk=None):
    """Map each class containing a method annotated with one of ``annotations``
    to the distribution (from ``dists``) it occurs in.

    Results are cached per jar under the primary suite's output root; jars with
    no valid cache entry are scanned by the FindClassesByAnnotatedMethods tool.
    """
    if len(dists) == 0:
        return {}
    candidates = {}
    # Create map from jar file to the binary suite distribution defining it
    jarsToDists = {d.classpath_repr(): d for d in dists}
    primarySuite = mx.primary_suite()
    cachesDir = None
    jarsToParse = []
    if primarySuite and primarySuite != mx._mx_suite:
        cachesDir = mx.ensure_dir_exists(join(primarySuite.get_output_root(), 'unittest'))
        for d in dists:
            jar = d.classpath_repr()
            testclasses = _read_cached_testclasses(cachesDir, jar, jdk if jdk else mx.get_jdk())
            if testclasses is not None:
                # Cache hit: reuse the cached class list.
                for classname in testclasses:
                    candidates[classname] = jarsToDists[jar]
            else:
                jarsToParse.append(jar)
    if jarsToParse:
        # Ensure Java support class is built
        mx.build(['--no-daemon', '--dependencies', 'com.oracle.mxtool.junit'])
        cp = mx.classpath(['com.oracle.mxtool.junit'] + list(jarsToDists.values()), jdk=jdk)
        out = mx.LinesOutputCapture()
        mx.run_java(['-cp', cp, 'com.oracle.mxtool.junit.FindClassesByAnnotatedMethods'] + annotations + jarsToParse, out=out, addDefaultArgs=False)
        # Each output line: '<jar> <class> <class> ...'; classes prefixed with
        # '!' are excluded but still recorded in the cache.
        for line in out.lines:
            parts = line.split(' ')
            jar = parts[0]
            reportedclasses = parts[1:] if len(parts) > 1 else []
            testclasses = [c for c in reportedclasses if not c.startswith("!")]
            excludedclasses = [c for c in reportedclasses if c.startswith("!")]
            if cachesDir:
                _write_cached_testclasses(cachesDir, jar, jdk if jdk else mx.get_jdk(), testclasses, excludedclasses)
            for classname in testclasses:
                candidates[classname] = jarsToDists[jar]
    return candidates
def _parseVmArgs(jdk, args, addDefaultArgs=True):
    """Preprocess VM arguments: expand mx project references, prepend the
    JaCoCo agent, translate legacy -G: Graal options, build the boot classpath,
    strip JVMCI from the classpath, and select the default JVMCI compiler."""
    args = mx.expand_project_in_args(args, insitu=False)
    jacocoArgs = mx_gate.get_jacoco_agent_args()
    if jacocoArgs:
        args = jacocoArgs + args

    # Support for -G: options
    def translateGOption(arg):
        if arg.startswith('-G:+'):
            if '=' in arg:
                mx.abort('Mixing + and = in -G: option specification: ' + arg)
            arg = '-Dgraal.' + arg[len('-G:+'):] + '=true'
        elif arg.startswith('-G:-'):
            if '=' in arg:
                mx.abort('Mixing - and = in -G: option specification: ' + arg)
            # len('-G:+') is used but equals len('-G:-') (both 4), so the
            # slice is correct; len('-G:-') would be clearer.
            arg = '-Dgraal.' + arg[len('-G:+'):] + '=false'
        elif arg.startswith('-G:'):
            if '=' not in arg:
                mx.abort('Missing "=" in non-boolean -G: option specification: ' + arg)
            arg = '-Dgraal.' + arg[len('-G:'):]
        return arg

    # add default graal.options.file and translate -G: options
    options_file = join(mx.primary_suite().dir, 'graal.options')
    options_file_arg = ['-Dgraal.options.file=' + options_file] if exists(options_file) else []
    # NOTE(review): Python 2 idiom — 'list + map(...)' raises TypeError on
    # Python 3 where map returns an iterator.
    args = options_file_arg + map(translateGOption, args)
    if '-G:+PrintFlags' in args and '-Xcomp' not in args:
        mx.warn('Using -G:+PrintFlags may have no effect without -Xcomp as Graal initialization is lazy')
    bcp = [mx.distribution('truffle:TRUFFLE_API').classpath_repr()]
    if _jvmciModes[_vm.jvmciMode]:
        bcp.extend([d.get_classpath_repr() for d in _bootClasspathDists])
    args = ['-Xbootclasspath/p:' + os.pathsep.join(bcp)] + args

    # Remove JVMCI from class path. It's only there to support compilation.
    cpIndex, cp = mx.find_classpath_arg(args)
    if cp:
        jvmciLib = mx.library('JVMCI').path
        cp = os.pathsep.join([e for e in cp.split(os.pathsep) if e != jvmciLib])
        args[cpIndex] = cp

    # Set the default JVMCI compiler
    jvmciCompiler = _compilers[-1]
    args = ['-Djvmci.Compiler=' + jvmciCompiler] + args

    # Anything after '-version' is ignored by the VM; warn so it is not silent.
    if '-version' in args:
        ignoredArgs = args[args.index('-version') + 1:]
        if len(ignoredArgs) > 0:
            mx.log("Warning: The following options will be ignored by the vm because they come after the '-version' argument: " + ' '.join(ignoredArgs))
    return jdk.processArgs(args, addDefaultArgs=addDefaultArgs)
def verify_jvmci_ci_versions(args):
    """
    Checks that the jvmci versions used in various ci files agree.

    If the ci.hocon files use a -dev version, it allows the travis ones to use the previous version.
    For example, if ci.hocon uses jvmci-0.24-dev, travis may use either jvmci-0.24-dev or jvmci-0.23
    """
    # Matches 'jvmci-<major>.<minor>' with an optional '-dev' suffix on
    # non-comment lines.
    version_pattern = re.compile(r'^(?!\s*#).*jvmci-(?P<version>\d*\.\d*)(?P<dev>-dev)?')

    def _grep_version(files, msg):
        # Returns (version, dev_flag); aborts if two different versions (or
        # dev flags) are found across the files.
        version = None
        dev = None
        last = None
        # NOTE(review): linenr is never reset per file, so reported line
        # numbers for later files are cumulative — confirm if intended.
        linenr = 0
        for filename in files:
            # NOTE(review): file handles are never closed (relies on GC).
            for line in open(filename):
                m = version_pattern.search(line)
                if m:
                    new_version = m.group('version')
                    new_dev = bool(m.group('dev'))
                    if (version and version != new_version) or (dev is not None and dev != new_dev):
                        mx.abort(
                            os.linesep.join([
                                "Multiple JVMCI versions found in {0} files:".format(msg),
                                " {0} in {1}:{2}: {3}".format(version + ('-dev' if dev else ''), *last),
                                " {0} in {1}:{2}: {3}".format(new_version + ('-dev' if new_dev else ''), filename, linenr, line),
                            ]))
                    last = (filename, linenr, line.rstrip())
                    version = new_version
                    dev = new_dev
                linenr += 1
        if not version:
            mx.abort("No JVMCI version found in {0} files!".format(msg))
        return version, dev

    hocon_version, hocon_dev = _grep_version(glob.glob(join(mx.primary_suite().dir, 'ci*.hocon')) + glob.glob(join(mx.primary_suite().dir, 'ci*/*.hocon')), 'ci.hocon')
    travis_version, travis_dev = _grep_version(glob.glob('.travis.yml'), 'TravisCI')
    if hocon_version != travis_version or hocon_dev != travis_dev:
        versions_ok = False
        # Tolerate travis lagging one version behind a -dev hocon version:
        # travis at X is accepted when hocon is at (X+1)-dev.
        if not travis_dev and hocon_dev:
            next_travis_version = [int(a) for a in travis_version.split('.')]
            next_travis_version[-1] += 1
            next_travis_version_str = '.'.join((str(a) for a in next_travis_version))
            if next_travis_version_str == hocon_version:
                versions_ok = True
        if not versions_ok:
            mx.abort("Travis and ci.hocon JVMCI versions do not match: {0} vs. {1}".format(travis_version + ('-dev' if travis_dev else ''), hocon_version + ('-dev' if hocon_dev else '')))
    mx.log('JVMCI versions are ok!')
def dimensions(self, suite, mxBenchmarkArgs, bmSuiteArgs):
    """Assemble the standard dimension fields attached to every benchmark data
    point: machine/build metadata plus commit info for the primary suite, every
    loaded suite (unless suppressed via --ignore-suite-commit-info) and the
    triggering suite.

    NOTE(review): Python 2 code — uses dict.iteritems().
    """
    standard = {
        "metric.uuid": self.uid(),
        "group": self.group(suite),
        "subgroup": suite.subgroup(),
        "bench-suite": suite.name(),
        "config.vm-flags": " ".join(suite.vmArgs(bmSuiteArgs)),
        "config.run-flags": " ".join(suite.runArgs(bmSuiteArgs)),
        "config.build-flags": self.buildFlags(),
        "config.platform-version": "",
        "machine.name": self.machineName(mxBenchmarkArgs),
        "machine.hostname": self.machineHostname(),
        "machine.arch": self.machineArch(),
        "machine.cpu-cores": self.machineCpuCores(),
        "machine.cpu-clock": self.machineCpuClock(),
        "machine.cpu-family": self.machineCpuFamily(),
        "machine.ram": self.machineRam(),
        "branch": self.branch(),
        "build.url": self.buildUrl(),
        "build.number": self.buildNumber(),
        "metric.score-function": "id",
        "warnings": "",
    }

    def commit_info(prefix, mxsuite):
        # Commit metadata for one suite, keys namespaced by `prefix`.
        # Suites without version control contribute nothing.
        vc = mxsuite.vc
        if vc is None:
            return {}
        info = vc.parent_info(mxsuite.dir)
        return {
            prefix + "commit.rev": vc.parent(mxsuite.dir),
            prefix + "commit.repo-url": vc.default_pull(mxsuite.dir),
            prefix + "commit.author": info["author"],
            prefix + "commit.author-ts": info["author-ts"],
            prefix + "commit.committer": info["committer"],
            prefix + "commit.committer-ts": info["committer-ts"],
        }

    standard.update(commit_info("", mx.primary_suite()))
    for (name, mxsuite) in mx._suites.iteritems():
        ignored = mxBenchmarkArgs.ignore_suite_commit_info
        if ignored and name in ignored:
            continue
        standard.update(commit_info("extra." + name + ".", mxsuite))
    triggering_suite = self.triggeringSuite(mxBenchmarkArgs)
    if triggering_suite:
        mxsuite = mx._suites[triggering_suite]
        standard.update(commit_info("extra.triggering-repo.", mxsuite))
    return standard
def rbcheck(args):
    '''Checks FastR builtins against GnuR

    gnur-only: GnuR builtins not implemented in FastR (i.e. TODO list).
    fastr-only: FastR builtins not implemented in GnuR
    both-diff: implemented in both GnuR and FastR, but with difference in signature (e.g. visibility)
    both: implemented in both GnuR and FastR with matching signature

    If the option --filter is not given, shows all groups.
    Multiple groups can be combined: e.g. "--filter gnur-only,fastr-only"'''
    test_classpath = mx.classpath('com.oracle.truffle.r.test')
    # The checker needs to know where the primary suite lives.
    args += ["--suite-path", mx.primary_suite().dir]
    mx.run_java(['-cp', test_classpath, 'com.oracle.truffle.r.test.tools.RBuiltinCheck'] + args)
def rbcheck(args):
    '''Checks FastR builtins against GnuR

    gnur-only: GnuR builtins not implemented in FastR (i.e. TODO list).
    fastr-only: FastR builtins not implemented in GnuR
    both-diff: implemented in both GnuR and FastR, but with difference in signature (e.g. visibility)
    both: implemented in both GnuR and FastR with matching signature

    If the option --filter is not given, shows all groups.
    Multiple groups can be combined: e.g. "--filter gnur-only,fastr-only"'''
    jvm_args = mx.get_runtime_jvm_args('com.oracle.truffle.r.test')
    # The checker needs to know where the primary suite lives.
    args.append("--suite-path")
    args.append(mx.primary_suite().dir)
    jvm_args += ['com.oracle.truffle.r.test.tools.RBuiltinCheck']
    mx.run_java(jvm_args + args)
def _buildGOptionsArgs(args):
    """Translate legacy ``-G:`` Graal options in ``args`` to ``-Dgraal.*``
    system properties and prepend ``-Dgraal.options.file`` when the primary
    suite has a graal.options file.

    :param args: list of VM argument strings.
    :return: new list with translated options; ``args`` is not modified.
    """
    def _translateGOption(arg):
        if arg.startswith('-G:+'):
            if '=' in arg:
                mx.abort('Mixing + and = in -G: option specification: ' + arg)
            arg = '-Dgraal.' + arg[len('-G:+'):] + '=true'
        elif arg.startswith('-G:-'):
            if '=' in arg:
                mx.abort('Mixing - and = in -G: option specification: ' + arg)
            # Use the matching prefix for the slice (same length as '-G:+',
            # but no longer misleading).
            arg = '-Dgraal.' + arg[len('-G:-'):] + '=false'
        elif arg.startswith('-G:'):
            if '=' not in arg:
                mx.abort('Missing "=" in non-boolean -G: option specification: ' + arg)
            arg = '-Dgraal.' + arg[len('-G:'):]
        return arg

    # add default graal.options.file and translate -G: options
    options_file = join(mx.primary_suite().dir, 'graal.options')
    options_file_arg = ['-Dgraal.options.file=' + options_file] if exists(options_file) else []
    # Fix: wrap map() in list() — on Python 3, 'list + map(...)' raises
    # TypeError because map returns an iterator (no-op on Python 2).
    return options_file_arg + list(map(_translateGOption, args))
def find_test_candidates(annotations, suite, jdk, buildCacheDir='unittest'):
    """
    Finds all classes containing methods annotated with one of the supplied annotations.
    To speed up subsequent invocations, the results are cached in the `buildCacheDir`.

    :param list annotations: a list of annotations to recognize test methods, e.g. ['@Test', '@Parameters']
    :param suite: the mx suite in which to look for test classes. If no suite is given, the primary suite is used.
    :param JDKConfig jdk: the JDK for which the list of classes must be found
    :param str buildCacheDir: a path relative to the mx suite output root that is used to store the cache files.
    :return: a dictionary associating each found test class with the distribution it occurs in.
    """
    assert not isabs(buildCacheDir), "buildCacheDir must be a relative path"
    # When no suite is given, compatibility checks are performed against the
    # primary suite.
    compat_suite = suite if suite else mx.primary_suite()
    if suite != mx._mx_suite and compat_suite.getMxCompatibility().useDistsForUnittest():
        # Modern path: scan built JAR distributions of the suite.
        jar_distributions = [d for d in mx.sorted_dists() if d.isJARDistribution() and exists(d.classpath_repr(resolve=False)) and (not suite or d.suite == suite)]
        # find a corresponding distribution for each test
        candidates = _find_classes_by_annotated_methods(annotations, jar_distributions, buildCacheDir, jdk)
    else:
        # Legacy path: only binary-suite distributions are scanned.
        binary_deps = [d for d in mx.dependencies(opt_limit_to_suite=True) if d.isJARDistribution() and isinstance(d.suite, mx.BinarySuite) and (not suite or suite == d.suite)]
        candidates = _find_classes_by_annotated_methods(annotations, binary_deps, buildCacheDir, jdk)
    # Additionally scan source projects directly, skipping projects whose
    # compliance exceeds the requested JDK.
    for p in mx.projects(opt_limit_to_suite=True):
        if not p.isJavaProject():
            continue
        if suite and not p.suite == suite:
            continue
        if jdk.javaCompliance < p.javaCompliance:
            continue
        for c in _find_classes_with_annotations(p, None, annotations):
            candidates[c] = p
    return candidates
def _buildGOptionsArgs(args):
    """Translate legacy ``-G:`` Graal options in ``args`` to ``-Dgraal.*``
    system properties and prepend ``-Dgraal.options.file`` when the primary
    suite has a graal.options file.

    :param args: list of VM argument strings.
    :return: new list with translated options; ``args`` is not modified.
    """
    def _translateGOption(arg):
        if arg.startswith("-G:+"):
            if "=" in arg:
                mx.abort("Mixing + and = in -G: option specification: " + arg)
            arg = "-Dgraal." + arg[len("-G:+") :] + "=true"
        elif arg.startswith("-G:-"):
            if "=" in arg:
                mx.abort("Mixing - and = in -G: option specification: " + arg)
            # Use the matching prefix for the slice (same length as '-G:+',
            # but no longer misleading).
            arg = "-Dgraal." + arg[len("-G:-") :] + "=false"
        elif arg.startswith("-G:"):
            if "=" not in arg:
                mx.abort('Missing "=" in non-boolean -G: option specification: ' + arg)
            arg = "-Dgraal." + arg[len("-G:") :]
        return arg

    # add default graal.options.file and translate -G: options
    options_file = join(mx.primary_suite().dir, "graal.options")
    options_file_arg = ["-Dgraal.options.file=" + options_file] if exists(options_file) else []
    # Fix: wrap map() in list() — on Python 3, 'list + map(...)' raises
    # TypeError because map returns an iterator (no-op on Python 2).
    return options_file_arg + list(map(_translateGOption, args))
def _nodeCostDump(args, extraVMarguments=None):
    """list the costs associated with each Node type"""
    # NOTE(review): Python 2 code — print statements and the StringIO module.
    import csv, StringIO
    parser = ArgumentParser(prog='mx nodecostdump')
    parser.add_argument('--regex', action='store', help="Node Name Regex", default=False, metavar='<regex>')
    parser.add_argument('--markdown', action='store_const', const=True, help="Format to Markdown table", default=False)
    args, vmargs = parser.parse_known_args(args)
    additionalPrimarySuiteClassPath = '-Dprimary.suite.cp=' + mx.primary_suite().dir
    vmargs.extend([additionalPrimarySuiteClassPath, '-XX:-UseJVMCIClassLoader', 'org.graalvm.compiler.hotspot.NodeCostDumpUtil'])
    out = mx.OutputCapture()
    regex = ""
    if args.regex:
        regex = args.regex
    # The dump utility writes ';'-separated CSV to stdout, captured in `out`.
    run_vm(vmargs + _remove_empty_entries(extraVMarguments) + [regex], out=out)
    if args.markdown:
        stringIO = StringIO.StringIO(out.data)
        reader = csv.reader(stringIO, delimiter=';', lineterminator="\n")
        firstRow = True
        maxLen = 0
        # First pass: find the widest cell to size the markdown separator row.
        for row in reader:
            for col in row:
                maxLen = max(maxLen, len(col))
        stringIO.seek(0)
        # Second pass: emit the table; the header row is followed by a
        # '|---|' separator line.
        for row in reader:
            s = '|'
            if firstRow:
                firstRow = False
                nrOfCols = len(row)
                for col in row:
                    s = s + col + "|"
                print s
                s = '|'
                for _ in range(nrOfCols):
                    s = s + ('-' * maxLen) + '|'
            else:
                for col in row:
                    s = s + col + "|"
            print s
    else:
        print out.data
def dimensions(self, suite, mxBenchmarkArgs, bmSuiteArgs):
    """Assemble the standard dimension fields attached to every benchmark data
    point: machine/build metadata plus commit info for the primary suite and
    every loaded suite.

    NOTE(review): Python 2 code — uses dict.iteritems().
    """
    standard = {
        "metric.uuid": self.uid(),
        "group": self.group(suite),
        "subgroup": suite.subgroup(),
        "bench-suite": suite.name(),
        "config.vm-flags": " ".join(suite.vmArgs(bmSuiteArgs)),
        "config.run-flags": " ".join(suite.runArgs(bmSuiteArgs)),
        "config.build-flags": self.buildFlags(),
        "machine.name": self.machineName(mxBenchmarkArgs),
        "machine.hostname": self.machineHostname(),
        "machine.arch": self.machineArch(),
        "machine.cpu-cores": self.machineCpuCores(),
        "machine.cpu-clock": self.machineCpuClock(),
        "machine.cpu-family": self.machineCpuFamily(),
        "machine.ram": self.machineRam(),
        "branch": self.branch(),
        "build.url": self.buildUrl(),
        "build.number": self.buildNumber(),
    }

    def commit_info(prefix, mxsuite, include_ts=False):
        # Commit metadata for one suite, keys namespaced by `prefix`.
        # NOTE(review): include_ts is accepted but never used — the timestamp
        # fields are always emitted; confirm whether it should gate them.
        vc = mxsuite.vc
        if vc is None:
            return {}
        info = vc.parent_info(mxsuite.dir)
        return {
            prefix + "commit.rev": vc.parent(mxsuite.dir),
            prefix + "commit.repo-url": vc.default_pull(mxsuite.dir),
            prefix + "commit.author": info["author"],
            prefix + "commit.author-ts": info["author-ts"],
            prefix + "commit.committer": info["committer"],
            prefix + "commit.committer-ts": info["committer-ts"],
        }

    standard.update(commit_info("", mx.primary_suite(), include_ts=True))
    for (name, mxsuite) in mx._suites.iteritems():
        standard.update(commit_info("extra." + name + ".", mxsuite, include_ts=False))
    return standard
def _find_classes_by_annotated_methods(annotations, dists, jdk=None):
    """Find classes in the given JAR distributions that declare methods with any of `annotations`.

    Returns a map from class name to the distribution defining it.
    Scan results are cached per jar under the primary suite's output root
    so unchanged jars are not re-parsed on subsequent runs.
    """
    if len(dists) == 0:
        return {}
    candidates = {}

    # Create map from jar file to the binary suite distribution defining it
    jarsToDists = {d.classpath_repr(): d for d in dists}

    primarySuite = mx.primary_suite()
    cachesDir = None
    jarsToParse = []
    if primarySuite and primarySuite != mx._mx_suite:
        cachesDir = mx.ensure_dir_exists(join(primarySuite.get_output_root(), 'unittest'))
    for d in dists:
        jar = d.classpath_repr()
        testclasses = _read_cached_testclasses(cachesDir, jar)
        if testclasses is not None:
            # Cache hit: reuse the previously discovered class names.
            for classname in testclasses:
                candidates[classname] = jarsToDists[jar]
        else:
            jarsToParse.append(jar)

    if jarsToParse:
        # Ensure Java support class is built
        mx.build(['--no-daemon', '--dependencies', 'com.oracle.mxtool.junit'])

        cp = mx.classpath(['com.oracle.mxtool.junit'] + jarsToDists.values(), jdk=jdk)
        out = mx.LinesOutputCapture()
        mx.run_java(['-cp', cp, 'com.oracle.mxtool.junit.FindClassesByAnnotatedMethods'] + annotations + jarsToParse, out=out, addDefaultArgs=False)

        # Each output line has the form: "<jar> <class1> <class2> ..."
        for line in out.lines:
            parts = line.split(' ')
            jar = parts[0]
            testclasses = parts[1:] if len(parts) > 1 else []
            if cachesDir:
                _write_cached_testclasses(cachesDir, jar, testclasses)
            for classname in testclasses:
                candidates[classname] = jarsToDists[jar]
    return candidates
def build(args, vm=None):
    """Build SubstrateVM: refresh the graal-compiler-flags config files used by
    the native-image driver, then delegate to the original mx build command.

    :param args: command line arguments forwarded to the original build command
    :param vm: VM selector forwarded to the original build command
    """
    if any([opt in args for opt in ['-h', '--help']]):
        orig_command_build(args, vm)

    mx.log('build: Checking SubstrateVM requirements for building ...')

    if not _host_os_supported():
        mx.abort('build: SubstrateVM can be built only on Darwin, Linux and Windows platforms')

    graal_compiler_flags_dir = join(mx.dependency('substratevm:com.oracle.svm.driver').dir, 'resources')

    def update_if_needed(version_tag, graal_compiler_flags):
        # Rewrite the per-JDK-version flags file only when its contents changed,
        # so unchanged files keep their timestamps and don't trigger rebuilds.
        flags_filename = 'graal-compiler-flags-' + version_tag + '.config'
        flags_path = join(graal_compiler_flags_dir, flags_filename)
        flags_contents = '\n'.join(graal_compiler_flags)
        needs_update = True
        try:
            with open(flags_path, 'r') as flags_file:
                if flags_file.read() == flags_contents:
                    needs_update = False
        except (IOError, OSError):
            # File missing or unreadable: (re)create it below.
            # (Was a bare `except:` which also swallowed SystemExit/KeyboardInterrupt.)
            pass

        if needs_update:
            with open(flags_path, 'w') as f:
                print('Write file ' + flags_path)
                f.write(flags_contents)

    # The "versions" file lists the known version tags; one flags file exists per tag.
    update_if_needed("versions", GRAAL_COMPILER_FLAGS_MAP.keys())
    for version_tag in GRAAL_COMPILER_FLAGS_MAP:
        update_if_needed(version_tag, GRAAL_COMPILER_FLAGS_BASE + GRAAL_COMPILER_FLAGS_MAP[version_tag])

    orig_command_build(args, vm)

    if 'substratevm' in mx.primary_suite().name:
        # build "jvm" config used by native-image and native-image-configure commands
        config = graalvm_jvm_configs[-1]
        build_native_image_image(config)
def gate(args):
    """run the tests used to validate a push

    If this command exits with a 0 exit code, then the gate passed."""
    parser = ArgumentParser(prog="mx gate")
    add_omit_clean_args(parser)
    parser.add_argument(
        "--all-suites", action="store_true", help="run gate tasks for all suites, not just the primary suite"
    )
    parser.add_argument(
        "--dry-run", action="store_true", help="just show the tasks that will be run without running them"
    )
    parser.add_argument("-x", action="store_true", help="makes --task-filter an exclusion instead of inclusion filter")
    parser.add_argument("--jacocout", help="specify the output directory for jacoco report")
    parser.add_argument(
        "--strict-mode",
        action="store_true",
        help="abort if a task cannot be executed due to missing tool configuration",
    )
    filtering = parser.add_mutually_exclusive_group()
    filtering.add_argument(
        "-t", "--task-filter", help="comma separated list of substrings to select subset of tasks to be run"
    )
    filtering.add_argument("-s", "--start-at", help="substring to select starting task")
    # Other suites may have registered extra gate arguments.
    for a, k in _extra_gate_arguments:
        parser.add_argument(*a, **k)
    args = parser.parse_args(args)
    cleanArgs = check_gate_noclean_arg(args)

    global _jacoco
    if args.dry_run:
        Task.dryRun = True
    if args.start_at:
        Task.startAtFilter = args.start_at
    elif args.task_filter:
        Task.filters = args.task_filter.split(",")
        Task.filtersExclude = args.x
    elif args.x:
        mx.abort("-x option cannot be used without --task-filter option")

    tasks = []
    total = Task("Gate")
    try:
        with Task("Versions", tasks) as t:
            if t:
                mx.command_function("version")(["--oneline"])
                mx.command_function("sversions")([])
        with Task("JDKReleaseInfo", tasks) as t:
            if t:
                # Dump the `release` file of every configured JDK for the build log.
                jdkDirs = os.pathsep.join([mx.get_env("JAVA_HOME", ""), mx.get_env("EXTRA_JAVA_HOMES", "")])
                for jdkDir in jdkDirs.split(os.pathsep):
                    release = join(jdkDir, "release")
                    if exists(release):
                        mx.log("==== " + jdkDir + " ====")
                        with open(release) as fp:
                            mx.log(fp.read().strip())
        with Task("Pylint", tasks) as t:
            if t:
                if mx.command_function("pylint")([]) != 0:
                    _warn_or_abort("Pylint not configured correctly. Cannot execute Pylint task.", args.strict_mode)
        gate_clean(cleanArgs, tasks)
        with Task("Distribution Overlap Check", tasks) as t:
            if t:
                if mx.command_function("checkoverlap")([]) != 0:
                    t.abort("Found overlapping distributions.")
        with Task("Canonicalization Check", tasks) as t:
            if t:
                mx.log(time.strftime("%d %b %Y %H:%M:%S - Ensuring mx/projects files are canonicalized..."))
                if mx.command_function("canonicalizeprojects")([]) != 0:
                    t.abort('Rerun "mx canonicalizeprojects" and check-in the modified mx/suite*.py files.')
        if mx.get_env("JDT"):
            with Task("BuildJavaWithEcj", tasks) as t:
                if t:
                    mx.command_function("build")(["-p", "--no-native", "--warning-as-error"])
                    gate_clean(cleanArgs, tasks, name="CleanAfterEcjBuild")
        else:
            _warn_or_abort("JDT environment variable not set. Cannot execute BuildJavaWithEcj task.", args.strict_mode)
        with Task("BuildJavaWithJavac", tasks) as t:
            if t:
                mx.command_function("build")(["-p", "--warning-as-error", "--no-native", "--force-javac"])
        with Task("IDEConfigCheck", tasks) as t:
            if t:
                if args.cleanIDE:
                    mx.command_function("ideclean")([])
                    mx.command_function("ideinit")([])
        eclipse_exe = mx.get_env("ECLIPSE_EXE")
        if eclipse_exe is not None:
            with Task("CodeFormatCheck", tasks) as t:
                if t and mx.command_function("eclipseformat")(["-e", eclipse_exe]) != 0:
                    t.abort('Formatter modified files - run "mx eclipseformat", check in changes and repush')
        else:
            _warn_or_abort(
                "ECLIPSE_EXE environment variable not set. Cannot execute CodeFormatCheck task.", args.strict_mode
            )
        with Task("Checkstyle", tasks) as t:
            if t and mx.command_function("checkstyle")(["--primary"]) != 0:
                t.abort("Checkstyle warnings were found")
        with Task("Checkheaders", tasks) as t:
            if t and mx.command_function("checkheaders")([]) != 0:
                t.abort("Checkheaders warnings were found")
        with Task("FindBugs", tasks) as t:
            if t and mx.command_function("findbugs")([]) != 0:
                t.abort("FindBugs warnings were found")
        # Reset any stale coverage data and select the JaCoCo mode for suite runners.
        if exists("jacoco.exec"):
            os.unlink("jacoco.exec")
        if args.jacocout is not None:
            _jacoco = "append"
        else:
            _jacoco = "off"
        # Run gate tasks registered by other suites.
        for suiteRunner in _gate_runners:
            suite, runner = suiteRunner
            if args.all_suites or suite is mx.primary_suite():
                runner(args, tasks)
        if args.jacocout is not None:
            mx.command_function("jacocoreport")([args.jacocout])
            _jacoco = "off"
    except KeyboardInterrupt:
        total.abort(1)
    except BaseException as e:
        import traceback
        traceback.print_exc()
        total.abort(str(e))
    total.stop()
    mx.log("Gate task times:")
    for t in tasks:
        mx.log(" " + str(t.duration) + "\t" + t.title)
    mx.log(" =======")
    mx.log(" " + str(total.duration))
    if args.task_filter:
        Task.filters = None
def _bisect_benchmark(argv, initial_branch, email_to):
    """Bisect a benchmark regression between a good and a bad commit.

    Configuration comes either from the ini file named by the
    BISECT_BENCHMARK_CONFIG environment variable (CI use) or from `argv`.
    The result summary is printed and also sent by email.
    """
    if 'BISECT_BENCHMARK_CONFIG' in os.environ:
        import configparser
        cp = configparser.ConfigParser()
        cp.read(os.environ['BISECT_BENCHMARK_CONFIG'])
        sec = cp['bisect-benchmark']
        # Mirror the argparse namespace so both config paths produce the same shape.
        args = types.SimpleNamespace()
        args.bad = sec['bad']
        args.good = sec['good']
        args.build_command = sec['build_command']
        args.benchmark_command = sec['benchmark_command']
        args.benchmark_criterion = sec.get('benchmark_criterion', 'BEST')
        args.enterprise = sec.getboolean('enterprise', False)
    else:
        parser = argparse.ArgumentParser()
        parser.add_argument('bad', help="Bad commit for bisection")
        parser.add_argument('good', help="Good commit for bisection")
        parser.add_argument(
            'build_command', help="Command to run in order to build the configuration")
        parser.add_argument(
            'benchmark_command',
            help=
            "Command to run in order to run the benchmark. Output needs to be in mx's format"
        )
        parser.add_argument(
            '--benchmark-criterion',
            default='BEST',
            help="Which result parameter should be used for comparisons")
        parser.add_argument('--enterprise', action='store_true',
                            help="Whether to checkout graal-enterprise")
        args = parser.parse_args(argv)

    primary_suite = mx.primary_suite()
    # Mutable cell so the nested callback can record that the enterprise
    # repo was already fetched once.
    fetched_enterprise = [False]

    def benchmark_callback(suite, commit):
        # Check out the commit under test and synchronize suite imports.
        suite.vc.update_to_branch(suite.vc_dir, commit)
        mx.run_mx(['sforceimports'], suite=suite)
        mx.run_mx(['--env', 'ce', 'sforceimports'], suite=get_suite('/vm'))
        if args.enterprise and suite.name != 'vm-enterprise':
            checkout_args = [
                '--dynamicimports', '/vm-enterprise', 'checkout-downstream', 'vm',
                'vm-enterprise'
            ]
            if fetched_enterprise[0]:
                # Already fetched during a previous callback; skip the network round-trip.
                checkout_args.append('--no-fetch')
            mx.run_mx(checkout_args, out=mx.OutputCapture())
            # Make sure vm is imported before vm-enterprise
            get_suite('/vm')
            mx.run_mx(['--env', 'ee', 'sforceimports'], suite=get_suite('/vm-enterprise'))
            fetched_enterprise[0] = True
        # Re-apply the commit in case the import sync moved the checkout.
        suite.vc.update_to_branch(suite.vc_dir, commit)
        mx.run_mx(['sforceimports'], suite=suite)
        debug_str = "debug: graalpython={} graal={}".format(
            get_commit(get_suite('graalpython')), get_commit(get_suite('/vm')))
        if args.enterprise:
            debug_str += " graal-enterprise={}".format(
                get_commit(get_suite('/vm-enterprise')))
        print(debug_str)
        env = os.environ.copy()
        # Keep per-commit build outputs separate so re-visiting a commit reuses its build.
        env['MX_ALT_OUTPUT_ROOT'] = 'mxbuild-{}'.format(commit)
        retcode = mx.run(shlex.split(args.build_command), env=env, nonZeroIsFatal=False)
        if retcode:
            raise RuntimeError(
                "Failed to execute the build command for {}".format(commit))
        output = mx.OutputCapture()
        retcode = mx.run(shlex.split(args.benchmark_command), env=env,
                         out=mx.TeeOutputCapture(output), nonZeroIsFatal=False)
        if retcode:
            raise RuntimeError(
                "Failed to execute benchmark for {}".format(commit))
        # Pull the chosen criterion's duration out of mx's benchmark output.
        match = re.search(
            r'{}.*duration: ([\d.]+)'.format(
                re.escape(args.benchmark_criterion)), output.data)
        if not match:
            raise RuntimeError("Failed to get result from the benchmark")
        return float(match.group(1))

    bad = get_commit(primary_suite, args.bad)
    good = get_commit(primary_suite, args.good)
    result = run_bisect_benchmark(primary_suite, bad, good, benchmark_callback)
    visualization = result.visualize()
    summary = result.summarize()
    print()
    print(visualization)
    print()
    print(summary)
    if 'CI' not in os.environ:
        print(
            "You can rerun a benchmark for a particular commit using:\nMX_ALT_OUTPUT_ROOT=mxbuild-$commit {}"
            .format(args.benchmark_command))
    send_email(
        initial_branch, email_to,
        "Bisection job has finished successfully.\n{}\n".format(summary) +
        "Note I'm just a script and I don't validate statistical significance of the above result.\n" +
        "Please take a moment to also inspect the detailed results below.\n\n{}\n\n"
        .format(visualization) + os.environ.get('BUILD_URL', 'Unknown URL'))
def sonarqube_upload(args):
    """run SonarQube scanner and upload JaCoCo results"""
    sonarqube_cli = mx.library("SONARSCANNER_CLI_4_2_0_1873", True)

    parser = ArgumentParser(prog='mx sonarqube-upload')
    parser.add_argument('--exclude-generated', action='store_true', help='Exclude generated source files')
    parser.add_argument('--skip-coverage', action='store_true', default=False, help='Do not upload coverage reports')
    args, sonar_args = mx.extract_VM_args(args, useDoubleDash=True, defaultAllVMArgs=True)
    args, other_args = parser.parse_known_args(args)
    java_props, other_args = _parse_java_properties(other_args)

    def _check_required_prop(prop):
        # Abort early if a mandatory -D property was not supplied by the caller.
        if prop not in java_props:
            mx.abort("Required property '{prop}' not present. (Format is '-D{prop}=<value>')".format(prop=prop))

    _check_required_prop('sonar.projectKey')
    _check_required_prop('sonar.host.url')

    basedir = mx.primary_suite().dir

    # collect excluded projects
    excludes, includes = _jacoco_excludes_includes_projects(limit_to_primary=True)
    # collect excluded classes
    exclude_classes = _jacoco_exclude_classes(includes)
    java_bin = []
    java_src = []
    java_libs = []

    def _visit_deps(dep, edge):
        # Collect jar dependencies for sonar.java.libraries.
        if dep.isJARDistribution() or dep.isLibrary():
            java_libs.append(dep.classpath_repr())

    mx.walk_deps(includes, visit=_visit_deps)

    # collect all sources and binaries -- do exclusion later
    for p in includes:
        java_src.extend(p.source_dirs())
        if not args.exclude_generated:
            gen_dir = p.source_gen_dir()
            if os.path.exists(gen_dir):
                java_src.append(gen_dir)
        java_bin.append(p.output_dir())

    java_src = [os.path.relpath(s, basedir) for s in java_src]
    java_bin = [os.path.relpath(b, basedir) for b in java_bin]

    # Overlayed sources and classes must be excluded
    jdk_compliance = mx.get_jdk().javaCompliance
    overlayed_sources = []
    overlayed_classfiles = {}
    for p in includes:
        if hasattr(p, "multiReleaseJarVersion") and jdk_compliance not in p.javaCompliance:  # JDK9+ overlays
            for srcDir in p.source_dirs():
                for root, _, files in os.walk(srcDir):
                    for name in files:
                        if name.endswith('.java') and name != 'package-info.java':
                            overlayed_sources.append(join(os.path.relpath(root, basedir), name))
        elif hasattr(p, "overlayTarget"):  # JDK8 overlays
            target = mx.project(p.overlayTarget)
            # First enumerate the overlay's own sources (relative to its source dirs) ...
            overlay_sources = []
            for srcDir in p.source_dirs():
                for root, _, files in os.walk(srcDir):
                    for name in files:
                        if name.endswith('.java') and name != 'package-info.java':
                            overlay_sources.append(join(os.path.relpath(root, srcDir), name))
            print(p, target, overlay_sources)
            # ... then mark the matching files in the overlay target as overlayed.
            for srcDir in target.source_dirs():
                for root, _, files in os.walk(srcDir):
                    for name in files:
                        if name.endswith('.java') and name != 'package-info.java':
                            s = join(os.path.relpath(root, srcDir), name)
                            if s in overlay_sources:
                                overlayed = join(os.path.relpath(root, basedir), name)
                                overlayed_sources.append(overlayed)
            # Remember the overlayed class file bytes so they can be removed
            # during the scan and restored afterwards.
            for s in overlay_sources:
                classfile = join(os.path.relpath(target.output_dir(), basedir), s[:-len('java')] + 'class')
                with open(classfile, 'rb') as fp:
                    overlayed_classfiles[classfile] = fp.read()

    exclude_dirs = []
    for p in excludes:
        exclude_dirs.extend(p.source_dirs())
        exclude_dirs.append(p.source_gen_dir())

    javaCompliance = max([p.javaCompliance for p in includes]) if includes else mx.JavaCompliance('1.7')

    jacoco_exec = get_jacoco_dest_file()
    if not os.path.exists(jacoco_exec) and not args.skip_coverage:
        mx.abort('No JaCoCo report file found: ' + jacoco_exec)

    def _add_default_prop(key, value):
        # Only apply a default when the user did not supply the property explicitly.
        if key not in java_props:
            java_props[key] = value

    # default properties
    _add_default_prop('sonar.java.source', str(javaCompliance))
    _add_default_prop('sonar.projectBaseDir', basedir)
    if not args.skip_coverage:
        _add_default_prop('sonar.jacoco.reportPaths', jacoco_exec)
    _add_default_prop('sonar.sources', ','.join(java_src))
    _add_default_prop('sonar.java.binaries', ','.join(java_bin))
    _add_default_prop('sonar.java.libraries', ','.join(java_libs))
    exclude_patterns = [os.path.relpath(e, basedir) + '**' for e in exclude_dirs] + \
                       overlayed_sources + \
                       list(set([os.path.relpath(match[0], basedir) for _, match in exclude_classes.items()]))
    if exclude_patterns:
        _add_default_prop('sonar.exclusions', ','.join(exclude_patterns))
        _add_default_prop('sonar.coverage.exclusions', ','.join(exclude_patterns))
    _add_default_prop('sonar.verbose', 'true' if mx._opts.verbose else 'false')

    with tempfile.NamedTemporaryFile(suffix="-sonarqube.properties", mode="w+") as fp:
        # prepare properties file
        fp.writelines(('{}={}\n'.format(k, v) for k, v in java_props.items()))
        fp.flush()

        # Since there's no options to exclude individual classes,
        # we temporarily delete the overlayed class files instead.
        for classfile in overlayed_classfiles:
            os.remove(classfile)

        try:
            # run sonarqube cli
            java_args = other_args + ['-Dproject.settings=' + fp.name, '-jar', sonarqube_cli.get_path(True)] + sonar_args
            exit_code = mx.run_java(java_args, nonZeroIsFatal=False)
        finally:
            # Restore temporarily deleted class files
            for classfile, data in overlayed_classfiles.items():
                with open(classfile, 'wb') as cf:
                    cf.write(data)

        if exit_code != 0:
            fp.seek(0)
            mx.abort('SonarQube scanner terminated with non-zero exit code: {}\n Properties file:\n{}'.format(
                exit_code, ''.join((' ' + l for l in fp.readlines()))))
def coverage_upload(args):
    """Generate a JaCoCo coverage report and upload it, with sources and
    binaries, to a remote host over ssh; also regenerates the remote
    index/navigation pages that browse all uploaded reports."""
    parser = ArgumentParser(prog='mx coverage-upload')
    parser.add_argument('--upload-url', required=False, default=mx.get_env('COVERAGE_UPLOAD_URL'), help='Format is like rsync: user@host:/directory')
    parser.add_argument('--build-name', required=False, default=mx.get_env('BUILD_NAME'))
    parser.add_argument('--build-url', required=False, default=mx.get_env('BUILD_URL'))
    parser.add_argument('--build-number', required=False, default=mx.get_env('BUILD_NUMBER'))
    args, other_args = parser.parse_known_args(args)
    if not args.upload_url:
        parser.print_help()
        return
    remote_host, remote_basedir = args.upload_url.split(':')
    if not remote_host:
        mx.abort('Cannot determine remote host from {}'.format(args.upload_url))
    primary = mx.primary_suite()
    info = primary.vc.parent_info(primary.dir)
    rev = primary.vc.parent(primary.dir)
    if len(remote_basedir) > 0 and not remote_basedir.endswith('/'):
        remote_basedir += '/'
    # Remote directory name: <suite>_<author timestamp>_<short rev>[_<build name>][_<build number>]
    remote_dir = '{}_{}_{}'.format(primary.name, datetime.datetime.fromtimestamp(info['author-ts']).strftime('%Y-%m-%d_%H_%M'), rev[:7])
    if args.build_name:
        remote_dir += '_' + args.build_name
    if args.build_number:
        remote_dir += '_' + args.build_number
    upload_dir = remote_basedir + remote_dir
    includes, excludes = _jacocoreport(['--omit-excluded'] + other_args)

    # Upload jar+sources
    coverage_sources = 'java_sources.tar.gz'
    coverage_binaries = 'java_binaries.tar.gz'

    with mx.Archiver(os.path.realpath(coverage_sources), kind='tgz') as sources, mx.Archiver(os.path.realpath(coverage_binaries), kind='tgz') as binaries:
        # Archive sources and class files of all non-test Java projects.
        def _visit_deps(dep, edge):
            if dep.isJavaProject() and not dep.is_test_project():
                binaries.zf.add(dep.output_dir(), dep.name)
                for d in dep.source_dirs():
                    sources.zf.add(d, dep.name)
                if os.path.exists(dep.source_gen_dir()):
                    sources.zf.add(dep.source_gen_dir(), dep.name)
        mx.walk_deps(mx.projects(), visit=_visit_deps)

    files = [get_jacoco_dest_file(), 'coverage', coverage_sources, coverage_binaries]
    print("Syncing {} to {}:{}".format(" ".join(files), remote_host, upload_dir))
    # Stream a tar of the files through ssh and unpack it in the remote directory.
    mx.run([
        'bash',
        '-c',
        r'tar -czf - {files} | ssh {remote} bash -c \'"mkdir -p {remotedir} && cd {remotedir} && cat | tar -x{verbose}z && chmod -R 755 ."\''
        .format(
            files=" ".join(files),
            remote=remote_host,
            remotedir=upload_dir,
            verbose='v' if mx._opts.verbose else '')
    ])

    def upload_string(content, path):
        # Write `content` to `path` on the remote host via ssh stdin.
        mx.run(['ssh', remote_host, 'bash', '-c', 'cat > "' + path + '"'], stdin=content)

    # Describe this upload so the remote index can list it.
    upload_string(json.dumps({
        'timestamp': time.time(),
        'suite': primary.name,
        'revision': rev,
        'directory': remote_dir,
        'build_name': args.build_name,
        'build_url': args.build_url,
        'jdk_version': str(mx.get_jdk().version),
        'build_number': args.build_number,
        'primary_info': info,
        'excludes': [str(e) for e in excludes],
        'includes': [str(i) for i in includes]}), upload_dir + '/description.json')

    # Regenerate index.json on the remote side by concatenating all description.json files.
    mx.run(['ssh', remote_host, 'bash', '-c', r'"(echo \[; for i in {remote_basedir}/*/description.json; do if \[ -s \$i \];then cat \$i; echo ,; fi done; echo null\]) > {remote_basedir}/index.json"'.format(remote_basedir=remote_basedir)])
    # Frameset entry page: navigation bar on top, selected report below.
    upload_string("""<html>
<script language="javascript">
function urlChange(url) {
    if (url.pathname !== "blank") {
        window.history.replaceState(null, null, url.pathname.replace("/coverage_upload/", "/coverage_upload/#"))
    }
}
</script>
<frameset rows="40,*">
  <frame id="navigation" src="navigation.html"/>
  <frame id="content" src="" onload="urlChange(this.contentWindow.location);" />
</frameset>
</html>""", remote_basedir + '/index.html')

    js_library_url = rewriteurl("https://ajax.googleapis.com/ajax/libs/angularjs/1.7.7/angular.js")
    # Navigation page: an AngularJS app that reads index.json, deduplicates
    # builds per suite/revision, and lets the user step between reports.
    upload_string(r"""<html>
<head>
<script src="%js_library_url"></script>
<script language="javascript">
var App = angular.module('myApp', [])
    .controller('IndexCtrl', function IndexCtrl($scope, $http) {
        var hash = parent.window.location.hash;
        if(hash) {
            hash = hash.substring(1, hash.length); // remove leading hash
        }
        $http.get('index.json').then(function(response, status) {
            var data = response.data.filter(x => x != null);
            /* #GR-17399
            Filter build that are unique per suite with revision as key and merge builds. */
            data = data
                .filter(x => !x.hasOwnProperty('merge'))
                .filter( // filter builds that are unique per suite with revision as key
                    x => !data
                        .filter(z => x != z && x.suite == z.suite) // exclude self build and build for other suites.
                        .map(z => z.revision) // map from array of build to array of revision
                        .includes(x.revision) // check if revision of x is index data.
                ).concat(data.filter(x => x.hasOwnProperty('merge'))); // concat unique build with merged build.
            data.sort((l,r) => r.timestamp - l.timestamp);
            if(data.length > 0) {
                var startdir;
                if(hash) {
                    startdir = data.find(build => hash.includes(build.directory));
                    startdir.hash = hash;
                }
                if(!startdir) {
                    startdir = data[0];
                }
                $scope.directory = startdir;
            }
            $scope.data = data;
        });
        $scope.$watch('directory', (dir, olddir) => {
            if(dir) {
                var content = parent.document.getElementById("content");
                var contentDocument = content.contentDocument || content.contentWindow.document;
                var newpath;
                if(olddir && olddir.suite === dir.suite) {
                    newpath = contentDocument.location.href.replace(olddir.directory, dir.directory);
                } else {
                    newpath = dir.hasOwnProperty('hash') ? hash : dir.directory + "/coverage/";
                }
                contentDocument.location.href = newpath;
                parent.window.history.replaceState(undefined, undefined, "#" + newpath.replace(/^.+coverage_upload\//, ""));
            }
        });
        $scope.step = (i) => $scope.directory = $scope.data[$scope.data.indexOf($scope.directory)+i];
    });
function copy(url) {
    var content = parent.document.getElementById("content");
    var contentDocument = content.contentDocument || content.contentWindow.document;
    var copyText = document.getElementById("copy");
    copyText.value = contentDocument.location.href.replace("coverage_upload/", "coverage_upload/#");
    copyText.select();
    document.execCommand("copy");
}
</script>
</head>
<body ng-app="myApp" ng-controller="IndexCtrl">
    <button ng-click="step(1)" ng-disabled="data.indexOf(directory) >= data.length-1"><<</button>
    <button ng-click="step(-1)" ng-disabled="data.indexOf(directory) <= 0">>></button>
    <select ng-model="directory" ng-options="(i.primary_info['author-ts']*1000|date:'yy-MM-dd hh:mm') + ' ' + i.build_name + ' ' + i.revision.substr(0,8) group by i.suite for i in data"></select>
    <a href="{{directory.build_url}}" ng-if="directory.build_url" target="_blank">Build</a>
    Commit: {{directory.revision.substr(0,5)}}: {{directory.primary_info.description}}
    <input type="text" style="opacity: 0;width: 20;" id="copy" />
    <button style="float: right;" onclick="copy(window.location);">Share url</button>
</body>
</html>""".replace("%js_library_url", js_library_url), remote_basedir + '/navigation.html')
def _run_gate(cleanArgs, args, tasks):
    """Execute the standard sequence of gate tasks, honoring task tags/filters."""
    global _jacoco
    with Task('Versions', tasks, tags=[Tags.always]) as t:
        if t:
            mx.command_function('version')(['--oneline'])
            mx.command_function('sversions')([])
            mx.log("Python version: {}".format(sys.version_info))
    with Task('JDKReleaseInfo', tasks, tags=[Tags.always]) as t:
        if t:
            # Dump the `release` file of every configured JDK for the build log.
            jdkDirs = os.pathsep.join([mx.get_env('JAVA_HOME', ''), mx.get_env('EXTRA_JAVA_HOMES', '')])
            for jdkDir in jdkDirs.split(os.pathsep):
                release = join(jdkDir, 'release')
                if exists(release):
                    mx.log('==== ' + jdkDir + ' ====')
                    with open(release) as fp:
                        mx.log(fp.read().strip())
    if mx.primary_suite() is mx._mx_suite:
        _run_mx_suite_tests()
    with Task('VerifyMultiReleaseProjects', tasks, tags=[Tags.always]) as t:
        if t:
            mx.command_function('verifymultireleaseprojects')([])
    # Run gate tasks registered to run before the standard sequence.
    for suiteRunner in _pre_gate_runners:
        suite, runner = suiteRunner
        if args.all_suites or suite is mx.primary_suite():
            runner(args, tasks)
    with Task('Pylint', tasks, tags=[Tags.style]) as t:
        if t:
            if mx.command_function('pylint')(['--primary']) != 0:
                _warn_or_abort('Pylint not configured correctly. Cannot execute Pylint task.', args.strict_mode)
    gate_clean(cleanArgs, tasks, tags=[Tags.build, Tags.fullbuild, Tags.ecjbuild])
    with Task('Distribution Overlap Check', tasks, tags=[Tags.style]) as t:
        if t:
            if mx.command_function('checkoverlap')([]) != 0:
                t.abort('Found overlapping distributions.')
    with Task('Canonicalization Check', tasks, tags=[Tags.style]) as t:
        if t:
            mx.log(time.strftime('%d %b %Y %H:%M:%S - Ensuring mx/projects files are canonicalized...'))
            if mx.command_function('canonicalizeprojects')([]) != 0:
                t.abort('Rerun "mx canonicalizeprojects" and modify the suite.py files as suggested.')
    with Task('Verify Java Sources in Project', tasks, tags=[Tags.style]) as t:
        if t:
            mx.log(time.strftime('%d %b %Y %H:%M:%S - Ensuring all Java sources are in a Java project directory...'))
            if mx.command_function('verifysourceinproject')([]) != 0:
                t.abort('Move or delete the Java sources that are not in a Java project directory.')
    if mx._is_supported_by_jdt(mx.DEFAULT_JDK_TAG):
        with Task('BuildWithEcj', tasks, tags=[Tags.fullbuild, Tags.ecjbuild], legacyTitles=['BuildJavaWithEcj']) as t:
            if t:
                defaultBuildArgs = ['-p']
                fullbuild = True if Task.tags is None else Tags.fullbuild in Task.tags  # pylint: disable=unsupported-membership-test
                # Using ecj alone is not compatible with --warning-as-error (see GR-3969)
                if not args.no_warning_as_error and fullbuild:
                    defaultBuildArgs += ['--warning-as-error']
                if mx.get_env('JDT'):
                    mx.command_function('build')(defaultBuildArgs + args.extra_build_args)
                    if fullbuild:
                        gate_clean(cleanArgs, tasks, name='CleanAfterEcjBuild', tags=[Tags.fullbuild])
                else:
                    _warn_or_abort('JDT environment variable not set. Cannot execute BuildWithEcj task.', args.strict_mode)
    with Task('BuildWithJavac', tasks, tags=[Tags.build, Tags.fullbuild], legacyTitles=['BuildJavaWithJavac']) as t:
        if t:
            defaultBuildArgs = ['-p']
            if not args.no_warning_as_error:
                defaultBuildArgs += ['--warning-as-error']
            mx.command_function('build')(defaultBuildArgs + ['--force-javac'] + args.extra_build_args)
    with Task('IDEConfigCheck', tasks, tags=[Tags.fullbuild]) as t:
        if t:
            if args.cleanIDE:
                mx.command_function('ideclean')([])
                mx.command_function('ideinit')([])
    with Task('CodeFormatCheck', tasks, tags=[Tags.style]) as t:
        if t:
            eclipse_exe = mx.get_env('ECLIPSE_EXE')
            if eclipse_exe is not None:
                if mx.command_function('eclipseformat')(['-e', eclipse_exe, '--primary']) != 0:
                    t.abort('Formatter modified files - run "mx eclipseformat", check in changes and repush')
            else:
                _warn_or_abort('ECLIPSE_EXE environment variable not set. Cannot execute CodeFormatCheck task.', args.strict_mode)
    with Task('Checkstyle', tasks, tags=[Tags.style]) as t:
        if t and mx.command_function('checkstyle')(['--primary']) != 0:
            t.abort('Checkstyle warnings were found')
    with Task('SpotBugs', tasks, tags=[Tags.fullbuild]) as t:
        if t and mx.command_function('spotbugs')([]) != 0:
            t.abort('FindBugs warnings were found')
    with Task('VerifyLibraryURLs', tasks, tags=[Tags.fullbuild]) as t:
        if t:
            mx.command_function('verifylibraryurls')([])
    # Reset any stale coverage data and select the JaCoCo mode for suite runners.
    jacoco_exec = get_jacoco_dest_file()
    if exists(jacoco_exec):
        os.unlink(jacoco_exec)
    if args.jacocout is not None:
        _jacoco = 'append'
    else:
        _jacoco = 'off'
    # Run gate tasks registered by other suites.
    for suiteRunner in _gate_runners:
        suite, runner = suiteRunner
        if args.all_suites or suite is mx.primary_suite():
            runner(args, tasks)
    if args.jacocout is not None:
        jacoco_args = [args.jacocout]
        if args.jacoco_omit_excluded:
            jacoco_args = ['--omit-excluded'] + jacoco_args
        mx.command_function('jacocoreport')(jacoco_args)
        _jacoco = 'off'
    if args.jacoco_zip is not None:
        # Archive the exec file plus the generated report directory.
        mx.log('Creating JaCoCo report archive: {}'.format(args.jacoco_zip))
        with zipfile.ZipFile(args.jacoco_zip, 'w', compression=zipfile.ZIP_DEFLATED) as zf:
            zf.write(jacoco_exec, join(args.jacocout, jacoco_exec))
            for root, _, files in os.walk(args.jacocout):
                for f in files:
                    zf.write(os.path.join(root, f))
        mx.log('Archiving done.')
def _parseVmArgs(args, addDefaultArgs=True):
    """Prepend the VM arguments needed to run Graal: JaCoCo agent, the default
    graal.options file, and either JDK8 bootclasspath/JVMCI entries or, on
    JDK9+, module-path and --add-exports options for the deployed Graal modules.

    Returns the argument list processed by `jdk.processArgs`.
    NOTE: uses dict.iteritems(), i.e. this code path is Python 2.
    """
    args = mx.expand_project_in_args(args, insitu=False)
    argsPrefix = []
    jacocoArgs = mx_gate.get_jacoco_agent_args()
    if jacocoArgs:
        argsPrefix.extend(jacocoArgs)

    # add default graal.options.file
    options_file = join(mx.primary_suite().dir, 'graal.options')
    if exists(options_file):
        argsPrefix.append('-Dgraal.options.file=' + options_file)

    if isJDK8:
        argsPrefix.append('-Djvmci.class.path.append=' + os.pathsep.join((e.get_path() for e in _jvmci_classpath)))
        argsPrefix.append('-Xbootclasspath/a:' + os.pathsep.join([dep.classpath_repr() for dep in _bootclasspath_appends]))
    else:
        deployedDists = [entry.dist() for entry in _jvmci_classpath] + \
                        [e for e in _bootclasspath_appends if e.isJARDistribution()]
        deployedModules = [as_java_module(dist, jdk) for dist in deployedDists]

        # Set or update module path to include Graal and its dependencies as modules
        graalModulepath = []
        for deployedModule in deployedModules:
            graalModulepath.extend([jmd.jarpath for jmd in deployedModule.modulepath if jmd.jarpath])
            graalModulepath.append(deployedModule.jarpath)
        graalModulepath = _uniqify(graalModulepath)

        # Update added exports to include concealed JDK packages required by Graal
        addedExports = {}
        args = _extract_added_exports(args, addedExports)
        for deployedModule in deployedModules:
            for concealingModule, packages in deployedModule.concealedRequires.iteritems():
                # No need to explicitly export JVMCI - it's exported via reflection
                if concealingModule != 'jdk.vm.ci':
                    for package in packages:
                        addedExports.setdefault(concealingModule + '/' + package, set()).add(deployedModule.name)
        for export, targets in addedExports.iteritems():
            argsPrefix.append('--add-exports=' + export + '=' + ','.join(sorted(targets)))

        # Extend or set --module-path argument
        mpUpdated = False
        for mpIndex in range(len(args)):
            if args[mpIndex] == '--module-path':
                # Two-token form: the path is the following argument.
                assert mpIndex + 1 < len(args), 'VM option ' + args[mpIndex] + ' requires an argument'
                args[mpIndex + 1] = os.pathsep.join(_uniqify(args[mpIndex + 1].split(os.pathsep) + graalModulepath))
                mpUpdated = True
                break
            elif args[mpIndex].startswith('--module-path='):
                # Single-token form: path is embedded after '='.
                mp = args[mpIndex][len('--module-path='):]
                args[mpIndex] = '--module-path=' + os.pathsep.join(_uniqify(mp.split(os.pathsep) + graalModulepath))
                mpUpdated = True
                break
        if not mpUpdated:
            argsPrefix.append('--module-path=' + os.pathsep.join(graalModulepath))

    if '-version' in args:
        # The VM stops parsing options after -version; warn about anything that follows.
        ignoredArgs = args[args.index('-version') + 1:]
        if len(ignoredArgs) > 0:
            mx.log("Warning: The following options will be ignored by the VM because they come after the '-version' argument: " + ' '.join(ignoredArgs))
    return jdk.processArgs(argsPrefix + args, addDefaultArgs=addDefaultArgs)
def gate(args):
    """run the tests used to validate a push

    If this command exits with a 0 exit code, then the gate passed."""
    parser = ArgumentParser(prog='mx gate')
    add_omit_clean_args(parser)
    parser.add_argument('--all-suites', action='store_true', help='run gate tasks for all suites, not just the primary suite')
    parser.add_argument('--dry-run', action='store_true', help='just show the tasks that will be run without running them')
    parser.add_argument('-x', action='store_true', help='makes --task-filter an exclusion instead of inclusion filter')
    parser.add_argument('--jacocout', help='specify the output directory for jacoco report')
    parser.add_argument('--strict-mode', action='store_true', help='abort if a task cannot be executed due to missing tool configuration')
    # --task-filter and --start-at are mutually exclusive ways of narrowing
    # the task list.
    filtering = parser.add_mutually_exclusive_group()
    filtering.add_argument('-t', '--task-filter', help='comma separated list of substrings to select subset of tasks to be run')
    filtering.add_argument('-s', '--start-at', help='substring to select starting task')
    for a, k in _extra_gate_arguments:
        parser.add_argument(*a, **k)
    args = parser.parse_args(args)
    cleanArgs = check_gate_noclean_arg(args)

    global _jacoco
    # Task filtering state is kept as class-level attributes on Task.
    if args.dry_run:
        Task.dryRun = True
    if args.start_at:
        Task.startAtFilter = args.start_at
    elif args.task_filter:
        Task.filters = args.task_filter.split(',')
        Task.filtersExclude = args.x
    elif args.x:
        mx.abort('-x option cannot be used without --task-filter option')

    tasks = []
    total = Task('Gate')
    try:
        with Task('Versions', tasks) as t:
            if t:
                mx.command_function('version')(['--oneline'])
                mx.command_function('sversions')([])

        with Task('JDKReleaseInfo', tasks) as t:
            if t:
                # Log the 'release' file of every configured JDK for diagnostics.
                jdkDirs = os.pathsep.join([mx.get_env('JAVA_HOME', ''), mx.get_env('EXTRA_JAVA_HOMES', '')])
                for jdkDir in jdkDirs.split(os.pathsep):
                    release = join(jdkDir, 'release')
                    if exists(release):
                        mx.log('==== ' + jdkDir + ' ====')
                        with open(release) as fp:
                            mx.log(fp.read().strip())

        with Task('Pylint', tasks) as t:
            if t:
                if mx.command_function('pylint')(['--primary']) != 0:
                    _warn_or_abort('Pylint not configured correctly. Cannot execute Pylint task.', args.strict_mode)

        gate_clean(cleanArgs, tasks)

        with Task('Distribution Overlap Check', tasks) as t:
            if t:
                if mx.command_function('checkoverlap')([]) != 0:
                    t.abort('Found overlapping distributions.')

        with Task('Canonicalization Check', tasks) as t:
            if t:
                mx.log(time.strftime('%d %b %Y %H:%M:%S - Ensuring mx/projects files are canonicalized...'))
                if mx.command_function('canonicalizeprojects')([]) != 0:
                    t.abort('Rerun "mx canonicalizeprojects" and check-in the modified mx/suite*.py files.')

        with Task('BuildJavaWithEcj', tasks) as t:
            if t:
                # Only run the ECJ build when the Eclipse batch compiler is
                # configured; in strict mode a missing JDT is a failure.
                if mx.get_env('JDT'):
                    mx.command_function('build')(['-p', '--no-native', '--warning-as-error'])
                    gate_clean(cleanArgs, tasks, name='CleanAfterEcjBuild')
                else:
                    _warn_or_abort('JDT environment variable not set. Cannot execute BuildJavaWithEcj task.', args.strict_mode)

        with Task('BuildJavaWithJavac', tasks) as t:
            if t:
                mx.command_function('build')(['-p', '--warning-as-error', '--no-native', '--force-javac'])

        with Task('IDEConfigCheck', tasks) as t:
            if t:
                # args.cleanIDE is presumably added by add_omit_clean_args —
                # TODO(review): confirm against that helper's definition.
                if args.cleanIDE:
                    mx.command_function('ideclean')([])
                    mx.command_function('ideinit')([])

        with Task('CodeFormatCheck', tasks) as t:
            if t:
                eclipse_exe = mx.get_env('ECLIPSE_EXE')
                if eclipse_exe is not None:
                    if mx.command_function('eclipseformat')(['-e', eclipse_exe, '--primary']) != 0:
                        t.abort('Formatter modified files - run "mx eclipseformat", check in changes and repush')
                else:
                    _warn_or_abort('ECLIPSE_EXE environment variable not set. Cannot execute CodeFormatCheck task.', args.strict_mode)

        with Task('Checkstyle', tasks) as t:
            if t and mx.command_function('checkstyle')(['--primary']) != 0:
                t.abort('Checkstyle warnings were found')

        with Task('Checkheaders', tasks) as t:
            if t and mx.command_function('checkheaders')([]) != 0:
                t.abort('Checkheaders warnings were found')

        with Task('FindBugs', tasks) as t:
            if t and mx.command_function('findbugs')([]) != 0:
                t.abort('FindBugs warnings were found')

        # Start from a clean coverage file; _jacoco controls whether suite
        # runners append coverage data to it.
        if exists('jacoco.exec'):
            os.unlink('jacoco.exec')

        if args.jacocout is not None:
            _jacoco = 'append'
        else:
            _jacoco = 'off'

        # Run suite-registered gate tasks (all suites or just the primary one).
        for suiteRunner in _gate_runners:
            suite, runner = suiteRunner
            if args.all_suites or suite is mx.primary_suite():
                runner(args, tasks)

        if args.jacocout is not None:
            mx.command_function('jacocoreport')([args.jacocout])
            _jacoco = 'off'
    except KeyboardInterrupt:
        total.abort(1)
    except BaseException as e:
        import traceback
        traceback.print_exc()
        total.abort(str(e))

    total.stop()

    # Summarize per-task timings.
    mx.log('Gate task times:')
    for t in tasks:
        mx.log(' ' + str(t.duration) + '\t' + t.title)
    mx.log(' =======')
    mx.log(' ' + str(total.duration))

    # Reset the class-level filter so later commands are unaffected.
    if args.task_filter:
        Task.filters = None
def _netbeansinit_project(p, jdks=None, files=None, libFiles=None, dists=None):
    """Generate NetBeans project metadata for mx project *p*.

    Writes three files under the project directory: ``build.xml`` (Ant targets
    that delegate clean/build/run/debug/javadoc back to mx via
    ``sys.executable`` + this file), ``nbproject/project.xml`` (the NetBeans
    j2se project descriptor with inter-project references) and
    ``nbproject/project.properties`` (classpaths, JDK platform, annotation
    processing settings), plus ``nbproject/cfg_hints.xml`` copied from suite
    settings.

    :param p: the mx Java project to generate metadata for.
    :param jdks: optional set; the JDK chosen for *p* is added to it.
    :param files: optional list collecting the paths of all generated files.
    :param libFiles: optional list collecting library jar paths referenced
           by the generated configuration.
    :param dists: distributions whose packaging should be wired into the
           generated build targets (defaults to none).
    """
    dists = [] if dists is None else dists
    nb_dir = mx.ensure_dir_exists(join(p.dir))
    nbproject_dir = mx.ensure_dir_exists(join(nb_dir, 'nbproject'))
    jdk = mx.get_jdk(p.javaCompliance)
    assert jdk
    if jdks is not None:
        jdks.add(jdk)
    execDir = mx.primary_suite().dir

    # ---- build.xml ---------------------------------------------------------
    out = mx.XMLDoc()
    out.open('project', {'name' : p.name, 'default' : 'default', 'basedir' : '.'})
    out.element('description', data='Builds, tests, and runs the project ' + p.name + '.')
    out.element('available', {'file' : 'nbproject/build-impl.xml', 'property' : 'build.impl.exists'})
    out.element('import', {'file' : 'nbproject/build-impl.xml', 'optional' : 'true'})
    out.element('extension-point', {'name' : '-mx-init'})
    out.element('available', {'file' : 'nbproject/build-impl.xml', 'property' : 'mx.init.targets', 'value' : 'init'})
    out.element('property', {'name' : 'mx.init.targets', 'value' : ''})
    out.element('bindtargets', {'extensionPoint' : '-mx-init', 'targets' : '${mx.init.targets}'})

    # -post-init: detect whether any javac.classpath entry is missing and
    # derive the no.dependencies / no.deps properties from that.
    out.open('target', {'name' : '-post-init'})
    out.open('pathconvert', {'property' : 'comma.javac.classpath', 'pathsep' : ','})
    out.element('path', {'path' : '${javac.classpath}'})
    out.close('pathconvert')
    out.open('restrict', {'id' : 'missing.javac.classpath'})
    out.element('filelist', {'dir' : '${basedir}', 'files' : '${comma.javac.classpath}'})
    out.open('not')
    out.element('exists')
    out.close('not')
    out.close('restrict')
    out.element('property', {'name' : 'missing.javac.classpath', 'refid' : 'missing.javac.classpath'})
    out.open('condition', {'property' : 'no.dependencies', 'value' : 'true'})
    out.element('equals', {'arg1' : '${missing.javac.classpath}', 'arg2' : ''})
    out.close('condition')
    out.element('property', {'name' : 'no.dependencies', 'value' : 'false'})
    out.open('condition', {'property' : 'no.deps'})
    out.element('equals', {'arg1' : '${no.dependencies}', 'arg2' : 'true'})
    out.close('condition')
    out.close('target')

    # 'clean' target: delegates to 'mx clean --projects <p>'.
    out.open('target', {'name' : 'clean'})
    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true', 'dir' : execDir})
    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
    out.element('arg', {'value' : os.path.abspath(__file__)})
    out.element('arg', {'value' : 'clean'})
    out.element('arg', {'value' : '--projects'})
    out.element('arg', {'value' : p.name})
    out.close('exec')
    out.close('target')

    # 'compile' target: delegates to 'mx build --only <p>,<dists...>'.
    out.open('target', {'name' : 'compile'})
    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true', 'dir' : execDir})
    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
    out.element('arg', {'value' : os.path.abspath(__file__)})
    out.element('arg', {'value' : 'build'})
    dependsOn = p.name
    for d in dists:
        dependsOn = dependsOn + ',' + d.name
    out.element('arg', {'value' : '--only'})
    out.element('arg', {'value' : dependsOn})
    out.element('arg', {'value' : '--force-javac'})
    out.element('arg', {'value' : '--no-native'})
    out.element('arg', {'value' : '--no-daemon'})
    out.close('exec')
    out.close('target')

    # 'package' runs the real work in '-package' only when NetBeans has
    # generated build-impl.xml.
    out.open('target', {'name' : 'package', 'if' : 'build.impl.exists'})
    out.element('antcall', {'target': '-package', 'inheritall': 'true', 'inheritrefs': 'true'})
    out.close('target')
    out.open('target', {'name' : '-package', 'depends' : '-mx-init'})
    out.element('loadfile', {'srcFile' : join(p.suite.get_output_root(), 'netbeans.log'), 'property' : 'netbeans.log', 'failonerror' : 'false'})
    out.element('echo', {'message' : '...truncated...${line.separator}', 'output' : join(p.suite.get_output_root(), 'netbeans.log')})
    out.element('echo', {'message' : '${netbeans.log}'})
    for d in dists:
        if d.isDistribution():
            # Marker files in the temp dir record when each distribution was
            # last scheduled for repackaging (see checkpackage-* below).
            out.element('touch', {'file' : '${java.io.tmpdir}/' + d.name})
            out.element('echo', {'message' : d.name + ' set to now${line.separator}', 'append' : 'true', 'output' : join(p.suite.get_output_root(), 'netbeans.log')})
    out.open('copy', {'todir' : '${build.classes.dir}', 'overwrite' : 'true'})
    out.element('resources', {'refid' : 'changed.files'})
    out.close('copy')
    if len(p.annotation_processors()) > 0:
        out.open('copy', {'todir' : '${src.ap-source-output.dir}'})
        out.open('fileset', {'dir': '${cos.src.dir.internal}/../sources/'})
        out.element('include', {'name': '**/*.java'})
        out.close('fileset')
        out.close('copy')
    # Spawn a detached ant run of 'packagelater' so packaging does not block
    # NetBeans' compile-on-save.
    out.open('exec', {'executable' : '${ant.home}/bin/ant', 'spawn' : 'true'})
    out.element('arg', {'value' : '-f'})
    out.element('arg', {'value' : '${ant.file}'})
    out.element('arg', {'value' : 'packagelater'})
    out.close('exec')
    out.close('target')

    for d in dists:
        if d.isDistribution():
            # checkpackage-<dist>: debounce concurrent packaging requests.
            # The marker file is stamped with the current time; after a 3s
            # sleep, only the run whose stamp is still the latest proceeds
            # (it sets the mx.<dist> property that guards packagelater-<dist>).
            out.open('target', {'name' : 'checkpackage-' + d.name})
            out.open('tstamp')
            out.element('format', {'pattern' : 'S', 'unit' : 'millisecond', 'property' : 'at.' + d.name})
            out.close('tstamp')
            out.element('touch', {'file' : '${java.io.tmpdir}/' + d.name, 'millis' : '${at.' + d.name + '}0000'})
            out.element('echo', {'message' : d.name + ' touched to ${at.' + d.name + '}0000${line.separator}', 'append' : 'true', 'output' : join(p.suite.get_output_root(), 'netbeans.log')})
            out.element('sleep', {'seconds' : '3'})
            out.open('condition', {'property' : 'mx.' + d.name, 'value' : sys.executable})
            out.open('islastmodified', {'millis' : '${at.' + d.name + '}0000', 'mode' : 'equals'})
            out.element('file', {'file' : '${java.io.tmpdir}/' + d.name})
            out.close('islastmodified')
            out.close('condition')
            out.element('echo', {'message' : d.name + ' defined as ' + '${mx.' + d.name + '}${line.separator}', 'append' : 'true', 'output' : join(p.suite.get_output_root(), 'netbeans.log')})
            out.close('target')
            # packagelater-<dist>: rebuild just this distribution via mx.
            out.open('target', {'name' : 'packagelater-' + d.name, 'depends' : 'checkpackage-' + d.name, 'if' : 'mx.' + d.name})
            out.open('exec', {'executable' : '${mx.' + d.name + '}', 'failonerror' : 'true', 'dir' : execDir, 'output' : join(p.suite.get_output_root(), 'netbeans.log'), 'append' : 'true'})
            out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
            out.element('arg', {'value' : os.path.abspath(__file__)})
            out.element('arg', {'value' : 'build'})
            out.element('arg', {'value' : '-f'})
            out.element('arg', {'value' : '--only'})
            out.element('arg', {'value' : d.name})
            out.element('arg', {'value' : '--force-javac'})
            out.element('arg', {'value' : '--no-native'})
            out.element('arg', {'value' : '--no-daemon'})
            out.close('exec')
            out.close('target')
    dependsOn = ''
    sep = ''
    for d in dists:
        dependsOn = dependsOn + sep + 'packagelater-' + d.name
        sep = ','
    out.open('target', {'name' : 'packagelater', 'depends' : dependsOn})
    out.close('target')

    out.open('target', {'name' : 'jar', 'depends' : 'compile'})
    out.close('target')
    # test/test-single are aliased to 'run' (mx unittest, see below).
    out.element('target', {'name' : 'test', 'depends' : 'run'})
    out.element('target', {'name' : 'test-single', 'depends' : 'run'})
    out.open('target', {'name' : 'run'})
    out.element('property', {'name' : 'test.class', 'value' : p.name})
    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true', 'dir' : execDir})
    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
    out.element('arg', {'value' : os.path.abspath(__file__)})
    out.element('arg', {'value' : 'unittest'})
    out.element('arg', {'value' : '${test.class}'})
    out.close('exec')
    out.close('target')
    # 'debug' starts a JPDA listener in NetBeans, then runs 'mx -d --attach'.
    out.element('target', {'name' : 'debug-test', 'depends' : 'debug'})
    out.open('target', {'name' : 'debug', 'depends' : '-mx-init'})
    out.element('property', {'name' : 'test.class', 'value' : p.name})
    out.open('nbjpdastart', {'addressproperty' : 'jpda.address', 'name' : p.name})
    out.open('classpath')
    out.open('fileset', {'dir' : '..'})
    out.element('include', {'name' : '*/bin/'})
    out.close('fileset')
    out.close('classpath')
    out.open('sourcepath')
    out.element('pathelement', {'location' : 'src'})
    out.close('sourcepath')
    out.close('nbjpdastart')
    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true', 'dir' : execDir})
    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
    out.element('arg', {'value' : os.path.abspath(__file__)})
    out.element('arg', {'value' : '-d'})
    out.element('arg', {'value' : '--attach'})
    out.element('arg', {'value' : '${jpda.address}'})
    out.element('arg', {'value' : 'unittest'})
    out.element('arg', {'value' : '${test.class}'})
    out.close('exec')
    out.close('target')
    out.open('target', {'name' : 'javadoc'})
    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true', 'dir' : execDir})
    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
    out.element('arg', {'value' : os.path.abspath(__file__)})
    out.element('arg', {'value' : 'javadoc'})
    out.element('arg', {'value' : '--projects'})
    out.element('arg', {'value' : p.name})
    out.element('arg', {'value' : '--force'})
    out.close('exec')
    out.element('nbbrowse', {'file' : 'javadoc/index.html'})
    out.close('target')
    out.close('project')
    mx.update_file(join(nb_dir, 'build.xml'), out.xml(indent='\t', newl='\n'))
    if files is not None:
        files.append(join(nb_dir, 'build.xml'))

    # ---- nbproject/project.xml --------------------------------------------
    out = mx.XMLDoc()
    out.open('project', {'xmlns' : 'http://www.netbeans.org/ns/project/1'})
    out.element('type', data='org.netbeans.modules.java.j2seproject')
    out.open('configuration')
    out.open('data', {'xmlns' : 'http://www.netbeans.org/ns/j2se-project/3'})
    out.element('name', data=p.name)
    out.element('explicit-platform', {'explicit-source-supported' : 'true'})
    out.open('source-roots')
    out.element('root', {'id' : 'src.dir'})
    if len(p.annotation_processors()) > 0:
        out.element('root', {'id' : 'src.ap-source-output.dir', 'name' : 'Generated Packages'})
    out.close('source-roots')
    out.open('test-roots')
    out.close('test-roots')
    out.close('data')
    firstDep = []

    def processDep(dep, edge):
        # Emit a <reference> for every project dependency; the enclosing
        # <references> element is opened lazily on the first one (firstDep
        # is a list so the closure can mutate it).
        if dep is p:
            return

        if dep.isProject():
            n = dep.name.replace('.', '_')
            if not firstDep:
                out.open('references', {'xmlns' : 'http://www.netbeans.org/ns/ant-project-references/1'})
                firstDep.append(dep)

            out.open('reference')
            out.element('foreign-project', data=n)
            out.element('artifact-type', data='jar')
            out.element('script', data='build.xml')
            out.element('target', data='jar')
            out.element('clean-target', data='clean')
            out.element('id', data='jar')
            out.close('reference')
    #pylint: disable=too-many-function-args
    p.walk_deps(visit=processDep, ignoredEdges=[mx.DEP_EXCLUDED])

    if firstDep:
        out.close('references')
    out.close('configuration')
    out.close('project')
    mx.update_file(join(nbproject_dir, 'project.xml'), out.xml(indent=' ', newl='\n'))
    if files is not None:
        files.append(join(nbproject_dir, 'project.xml'))

    # ---- nbproject/project.properties -------------------------------------
    out = StringIO()
    jdkPlatform = 'JDK_' + str(jdk.version)

    annotationProcessorEnabled = "false"
    annotationProcessorSrcFolder = ""
    annotationProcessorSrcFolderRef = ""
    if len(p.annotation_processors()) > 0:
        annotationProcessorEnabled = "true"
        mx.ensure_dir_exists(p.source_gen_dir())
        annotationProcessorSrcFolder = os.path.relpath(p.source_gen_dir(), nb_dir)
        # Escape backslashes for the Java properties format (Windows paths).
        annotationProcessorSrcFolder = annotationProcessorSrcFolder.replace('\\', '\\\\')
        annotationProcessorSrcFolderRef = "src.ap-source-output.dir=" + annotationProcessorSrcFolder

    # Where possible, symlink nbproject/build to the mx output dir so
    # NetBeans and mx share compiled classes.
    canSymlink = not (mx.is_windows() or mx.is_cygwin()) and 'symlink' in dir(os)
    if canSymlink:
        nbBuildDir = join(nbproject_dir, 'build')
        apSourceOutRef = "annotation.processing.source.output=" + annotationProcessorSrcFolder
        if os.path.lexists(nbBuildDir):
            os.unlink(nbBuildDir)
        os.symlink(p.output_dir(), nbBuildDir)
    else:
        nbBuildDir = p.output_dir()
        apSourceOutRef = ""
    mx.ensure_dir_exists(p.output_dir())

    mx_ide_eclipse._copy_eclipse_settings(nb_dir, p)

    # NOTE: the trailing .replace calls turn ':' into the platform path
    # separator and '/' into the platform dir separator for the whole block.
    content = """
annotation.processing.enabled=""" + annotationProcessorEnabled + """
annotation.processing.enabled.in.editor=""" + annotationProcessorEnabled + """
""" + apSourceOutRef + """
annotation.processing.processors.list=
annotation.processing.run.all.processors=true
application.title=""" + p.name + """
application.vendor=mx
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.eclipseFormatterActiveProfile=
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.eclipseFormatterEnabled=true
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.eclipseFormatterLocation=
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.enableFormatAsSaveAction=true
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.linefeed=
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.preserveBreakPoints=true
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.SaveActionModifiedLinesOnly=false
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.showNotifications=false
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.sourcelevel=
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.useProjectPref=true
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.useProjectSettings=true
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.eclipseFormatterActiveProfile=
auxiliary.org-netbeans-spi-editor-hints-projects.perProjectHintSettingsEnabled=true
auxiliary.org-netbeans-spi-editor-hints-projects.perProjectHintSettingsFile=nbproject/cfg_hints.xml
build.classes.dir=${build.dir}
build.classes.excludes=**/*.java,**/*.form
# This directory is removed when the project is cleaned:
build.dir=""" + nbBuildDir + """
$cos.update=package
$cos.update.resources=changed.files
compile.on.save=true
build.generated.sources.dir=${build.dir}/generated-sources
# Only compile against the classpath explicitly listed here:
build.sysclasspath=ignore
build.test.classes.dir=${build.dir}/test/classes
build.test.results.dir=${build.dir}/test/results
# Uncomment to specify the preferred debugger connection transport:
#debug.transport=dt_socket
debug.classpath=\\
    ${run.classpath}
debug.test.classpath=\\
    ${run.test.classpath}
# This directory is removed when the project is cleaned:
dist.dir=dist
dist.jar=${dist.dir}/""" + p.name + """.jar
dist.javadoc.dir=${dist.dir}/javadoc
endorsed.classpath=
excludes=
includes=**
jar.compress=false
java.main.action=test
# Space-separated list of extra javac options
javac.compilerargs=-XDignore.symbol.file
javac.deprecation=false
javac.source=""" + str(p.javaCompliance) + """
javac.target=""" + str(p.javaCompliance) + """
javac.test.classpath=\\
    ${javac.classpath}:\\
    ${build.classes.dir}
javadoc.additionalparam=
javadoc.author=false
javadoc.encoding=${source.encoding}
javadoc.noindex=false
javadoc.nonavbar=false
javadoc.notree=false
javadoc.private=false
javadoc.splitindex=true
javadoc.use=true
javadoc.version=false
javadoc.windowtitle=
manifest.file=manifest.mf
meta.inf.dir=${src.dir}/META-INF
mkdist.disabled=false
platforms.""" + jdkPlatform + """.home=""" + jdk.home + """
platform.active=""" + jdkPlatform + """
run.classpath=\\
    ${javac.classpath}:\\
    ${build.classes.dir}
# Space-separated list of JVM arguments used when running the project
# (you may also define separate properties like run-sys-prop.name=value instead of -Dname=value
# or test-sys-prop.name=value to set system properties for unit tests):
run.jvmargs=
run.test.classpath=\\
    ${javac.test.classpath}:\\
    ${build.test.classes.dir}
test.src.dir=./test
""" + annotationProcessorSrcFolderRef + """
source.encoding=UTF-8""".replace(':', os.pathsep).replace('/', os.sep)
    print(content, file=out)

    # Workaround for NetBeans "too clever" behavior. If you want to be
    # able to press F6 or Ctrl-F5 in NetBeans and run/debug unit tests
    # then the project must have its main.class property set to an
    # existing class with a properly defined main method. Until this
    # behavior is remedied, we specify a well known Truffle class
    # that will be on the class path for most Truffle projects.
    # This can be overridden by defining a netbeans.project.properties
    # attribute for a project in suite.py (see below).
    print("main.class=com.oracle.truffle.api.impl.Accessor", file=out)

    # Add extra properties specified in suite.py for this project
    if hasattr(p, 'netbeans.project.properties'):
        properties = getattr(p, 'netbeans.project.properties')
        for prop in [properties] if isinstance(properties, str) else properties:
            print(prop, file=out)

    # First source dir becomes src.dir; the rest get src.<name>.dir entries.
    mainSrc = True
    for src in p.srcDirs:
        srcDir = mx.ensure_dir_exists(join(p.dir, src))
        ref = 'file.reference.' + p.name + '-' + src
        print(ref + '=' + os.path.relpath(srcDir, nb_dir), file=out)
        if mainSrc:
            print('src.dir=${' + ref + '}', file=out)
            mainSrc = False
        else:
            print('src.' + src + '.dir=${' + ref + '}', file=out)

    javacClasspath = []

    def newDepsCollector(into):
        # Collector for walk_deps: keeps only deps that contribute to the
        # compile classpath.
        return lambda dep, edge: into.append(dep) if dep.isLibrary() or dep.isJdkLibrary() or dep.isProject() or dep.isClasspathDependency() else None

    deps = []
    p.walk_deps(visit=newDepsCollector(deps))
    annotationProcessorOnlyDeps = []
    if len(p.annotation_processors()) > 0:
        for apDep in p.annotation_processors():
            resolvedApDeps = []
            apDep.walk_deps(visit=newDepsCollector(resolvedApDeps))
            for resolvedApDep in resolvedApDeps:
                if not resolvedApDep in deps:
                    deps.append(resolvedApDep)
                    annotationProcessorOnlyDeps.append(resolvedApDep)

    annotationProcessorReferences = []

    for dep in deps:
        if dep == p:
            continue

        if dep.isLibrary() or dep.isJdkLibrary():
            if dep.isLibrary():
                path = dep.get_path(resolve=True)
                sourcePath = dep.get_source_path(resolve=True)
            else:
                path = dep.classpath_repr(jdk, resolve=True)
                sourcePath = dep.get_source_path(jdk)
            if path:
                if os.sep == '\\':
                    path = path.replace('\\', '\\\\')
                ref = 'file.reference.' + dep.name + '-bin'
                print(ref + '=' + path, file=out)
                if libFiles:
                    libFiles.append(path)
            if sourcePath:
                if os.sep == '\\':
                    sourcePath = sourcePath.replace('\\', '\\\\')
                print('source.reference.' + dep.name + '-bin=' + sourcePath, file=out)
        elif dep.isMavenProject():
            path = dep.get_path(resolve=False)
            if path:
                if os.sep == '\\':
                    path = path.replace('\\', '\\\\')
                ref = 'file.reference.' + dep.name + '-bin'
                print(ref + '=' + path, file=out)
        elif dep.isProject():
            n = dep.name.replace('.', '_')
            relDepPath = os.path.relpath(dep.dir, nb_dir).replace(os.sep, '/')
            if canSymlink:
                depBuildPath = join('nbproject', 'build')
            else:
                depBuildPath = 'dist/' + dep.name + '.jar'
            ref = 'reference.' + n + '.jar'
            print('project.' + n + '=' + relDepPath, file=out)
            print(ref + '=${project.' + n + '}/' + depBuildPath, file=out)
        elif dep.isJreLibrary():
            continue
        elif dep.isClasspathDependency():
            extra = [di for di in dep.deps if di not in deps]
            if dep.isDistribution() and dep.deps and not extra:
                # ignore distribution classpath dependencies that only contain other explicit depedencies
                continue
            path = dep.classpath_repr(resolve=True)
            sourcePath = dep.get_source_path(jdk) if hasattr(dep, 'get_source_path') else None
            if path:
                if os.sep == '\\':
                    path = path.replace('\\', '\\\\')
                ref = 'file.reference.' + dep.name + '-bin'
                print(ref + '=' + path, file=out)
                if libFiles:
                    libFiles.append(path)
            if sourcePath:
                if os.sep == '\\':
                    sourcePath = sourcePath.replace('\\', '\\\\')
                print('source.reference.' + dep.name + '-bin=' + sourcePath, file=out)

        # Annotation-processor-only deps go on the processor path, not the
        # compile classpath.
        if not dep in annotationProcessorOnlyDeps:
            javacClasspath.append('${' + ref + '}')
        else:
            annotationProcessorReferences.append('${' + ref + '}')

    print('javac.classpath=\\\n    ' + (os.pathsep + '\\\n    ').join(javacClasspath), file=out)
    print('javac.processorpath=' + (os.pathsep + '\\\n    ').join(['${javac.classpath}'] + annotationProcessorReferences), file=out)
    print('javac.test.processorpath=' + (os.pathsep + '\\\n    ').join(['${javac.test.classpath}'] + annotationProcessorReferences), file=out)

    mx.update_file(join(nbproject_dir, 'project.properties'), out.getvalue())
    out.close()
    if files is not None:
        files.append(join(nbproject_dir, 'project.properties'))

    # ---- nbproject/cfg_hints.xml ------------------------------------------
    for source in p.suite.netbeans_settings_sources().get('cfg_hints.xml'):
        with open(source) as fp:
            content = fp.read()
    mx.update_file(join(nbproject_dir, 'cfg_hints.xml'), content)
    if files is not None:
        files.append(join(p.dir, 'nbproject', 'cfg_hints.xml'))
def _run_tests(args, harness, vmLauncher, annotations, testfile, blacklist, whitelist, regex, suite):
    """Resolve the test classes selected by *args* and hand them to *harness*.

    :param args: mix of VM options and test selectors; VM options must precede
           the first selector. A single selector may be 'Class#method'.
    :param harness: called as harness(depsContainingTests, vmLauncher, vmArgs)
           when at least one class survived filtering.
    :param vmLauncher: supplies the JDK used to scan for annotated methods.
    :param annotations: the annotations marking a method as a test.
    :param testfile: path to which matched class names are written, one per line.
    :param blacklist: optional glob patterns; matching classes are excluded.
    :param whitelist: optional glob patterns; only matching classes are kept.
    :param regex: optional regular expression classes must match.
    :param suite: restrict the search to this suite (falsy: all suites).
    """
    vmArgs, tests = mx.extract_VM_args(args)
    for t in tests:
        if t.startswith('-'):
            mx.abort('VM option ' + t + ' must precede ' + tests[0])

    # this is what should be used
    compat_suite = suite if suite else mx.primary_suite()
    if suite != mx._mx_suite and compat_suite.getMxCompatibility().useDistsForUnittest():
        jar_distributions = [d for d in mx.sorted_dists() if d.isJARDistribution() and (not suite or d.suite == suite)]
        # find a corresponding distribution for each test
        candidates = _find_classes_by_annotated_methods(annotations, jar_distributions, vmLauncher.jdk())
    else:
        binary_deps = [d for d in mx.dependencies(opt_limit_to_suite=True) if d.isJARDistribution() and
                       isinstance(d.suite, mx.BinarySuite) and (not suite or suite == d.suite)]
        candidates = _find_classes_by_annotated_methods(annotations, binary_deps, vmLauncher.jdk())
        for p in mx.projects(opt_limit_to_suite=True):
            if not p.isJavaProject():
                continue
            if suite and not p.suite == suite:
                continue
            if vmLauncher.jdk().javaCompliance < p.javaCompliance:
                continue
            for c in _find_classes_with_annotations(p, None, annotations):
                candidates[c] = p

    classes = []
    if len(tests) == 0:
        # No selectors: run everything that was found.
        classes = candidates.keys()
        depsContainingTests = set(candidates.values())
    else:
        depsContainingTests = set()
        found = False
        if len(tests) == 1 and '#' in tests[0]:
            # Single 'Class#method' selector: must resolve to exactly one class.
            words = tests[0].split('#')
            if len(words) != 2:
                mx.abort("Method specification is class#method: " + tests[0])
            t, method = words
            for c, p in candidates.iteritems():
                # prefer exact matches first
                if t == c:
                    found = True
                    classes.append(c)
                    depsContainingTests.add(p)
            if not found:
                # fall back to substring matching
                for c, p in candidates.iteritems():
                    if t in c:
                        found = True
                        classes.append(c)
                        depsContainingTests.add(p)
            if not found:
                mx.log('warning: no tests matched by substring: ' + t)
            elif len(classes) != 1:
                mx.abort('More than one test matches substring {0} {1}'.format(t, classes))
            classes = [c + "#" + method for c in classes]
        else:
            for t in tests:
                if '#' in t:
                    mx.abort('Method specifications can only be used in a single test: ' + t)
                # Fix: reset per selector so every unmatched substring warns,
                # not just those before the first match.
                found = False
                for c, p in candidates.iteritems():
                    if t in c:
                        found = True
                        classes.append(c)
                        depsContainingTests.add(p)
                if not found:
                    mx.log('warning: no tests matched by substring: ' + t)

    if blacklist:
        classes = [c for c in classes if not any((glob.match(c) for glob in blacklist))]
    if whitelist:
        classes = [c for c in classes if any((glob.match(c) for glob in whitelist))]
    if regex:
        classes = [c for c in classes if re.search(regex, c)]

    if len(classes) != 0:
        # Use a context manager so the file is closed even if a write fails.
        with open(testfile, 'w') as f_testfile:
            for c in classes:
                f_testfile.write(c + '\n')
        harness(depsContainingTests, vmLauncher, vmArgs)
def _parseVmArgs(args, addDefaultArgs=True):
    """Translate mx-level VM arguments into the final JVM argument list.

    In addition to the JaCoCo agent, default graal.options file and
    JDK-specific Graal deployment (boot class path on JDK 8, '-modulepath'
    plus '-XaddExports:' on early module-aware JDKs), this variant also
    rewrites deprecated '-G:' options into their '-Dgraal.*' system-property
    equivalents (emitting a deprecation warning for each) and forces the
    JVMCI compiler to Graal.

    :param list args: VM arguments from the mx command line; may be mutated
           in place (an existing -modulepath/-mp value is rewritten).
    :param bool addDefaultArgs: forwarded to ``jdk.processArgs``.
    :return: the processed argument list as produced by ``jdk.processArgs``.
    """
    args = mx.expand_project_in_args(args, insitu=False)
    argsPrefix = []
    jacocoArgs = mx_gate.get_jacoco_agent_args()
    if jacocoArgs:
        argsPrefix.extend(jacocoArgs)

    # Check for -G: options
    def checkGOption(arg):
        # Maps '-G:+Name' -> '-Dgraal.Name=true', '-G:-Name' -> '=false',
        # '-G:Name=Value' -> '-Dgraal.Name=Value'; other args pass through.
        if arg.startswith('-G:+'):
            if '=' in arg:
                mx.abort('Mixing + and = in -G: option specification: ' + arg)
            translation = '-Dgraal.' + arg[len('-G:+'):] + '=true'
        elif arg.startswith('-G:-'):
            if '=' in arg:
                mx.abort('Mixing - and = in -G: option specification: ' + arg)
            # len('-G:+') == len('-G:-'), so this slice strips the '-G:-'
            # prefix correctly despite the '+' in the constant.
            translation = '-Dgraal.' + arg[len('-G:+'):] + '=false'
        elif arg.startswith('-G:'):
            if '=' not in arg:
                mx.abort('Missing "=" in non-boolean -G: option specification: ' + arg)
            translation = '-Dgraal.' + arg[len('-G:'):]
        else:
            return arg

        mx.warn('Support for -G options is deprecated and will soon be removed. Replace "' + arg + '" with "' + translation + '"')
        return translation

    # add default graal.options.file
    options_file = join(mx.primary_suite().dir, 'graal.options')
    if exists(options_file):
        argsPrefix.append('-Dgraal.options.file=' + options_file)

    args = [checkGOption(a) for a in args]
    if '-Dgraal.PrintFlags=true' in args and '-Xcomp' not in args:
        mx.warn('Using -Dgraal.PrintFlags=true may have no effect without -Xcomp as Graal initialization is lazy')

    if isJDK8:
        argsPrefix.append('-Djvmci.class.path.append=' + os.pathsep.join((e.get_path() for e in _jvmci_classpath)))
        argsPrefix.append('-Xbootclasspath/a:' + os.pathsep.join([dep.classpath_repr() for dep in _bootclasspath_appends]))
    else:
        # Early-modular JDK: deploy the JVMCI/bootclasspath distributions as
        # Java modules (note the pre-GA '-modulepath'/'-XaddExports:' spelling
        # rather than '--module-path'/'--add-exports').
        deployedDists = [entry.dist() for entry in _jvmci_classpath] + \
            [e for e in _bootclasspath_appends if e.isJARDistribution()]
        deployedModules = [as_java_module(dist, jdk) for dist in deployedDists]

        # Set or update module path to include Graal and its dependencies as modules
        graalModulepath = []
        for deployedModule in deployedModules:
            graalModulepath.extend([jmd.jarpath for jmd in deployedModule.modulepath if jmd.jarpath])
            graalModulepath.append(deployedModule.jarpath)
        graalModulepath = _uniqify(graalModulepath)

        # Update added exports to include concealed JDK packages required by Graal
        addedExports = {}
        args = _extract_added_exports(args, addedExports)
        for deployedModule in deployedModules:
            for concealingModule, packages in deployedModule.concealedRequires.iteritems():
                # No need to explicitly export JVMCI - it's exported via reflection
                if concealingModule != 'jdk.vm.ci':
                    for package in packages:
                        addedExports.setdefault(concealingModule + '/' + package, set()).add(deployedModule.name)
        for export, targets in addedExports.iteritems():
            argsPrefix.append('-XaddExports:' + export + '=' + ','.join(sorted(targets)))

        # Extend or set -modulepath argument
        mpUpdated = False
        for mpIndex in range(len(args)):
            if args[mpIndex] in ['-modulepath', '-mp']:
                assert mpIndex + 1 < len(args), 'VM option ' + args[mpIndex] + ' requires an argument'
                args[mpIndex + 1] = os.pathsep.join(_uniqify(args[mpIndex + 1].split(os.pathsep) + graalModulepath))
                mpUpdated = True
                break
        if not mpUpdated:
            argsPrefix.append('-modulepath')
            argsPrefix.append(os.pathsep.join(graalModulepath))

    # Set the JVMCI compiler to Graal
    argsPrefix.append('-Djvmci.Compiler=graal')

    # Options after '-version' are never seen by the VM; warn so users notice.
    if '-version' in args:
        ignoredArgs = args[args.index('-version') + 1:]
        if len(ignoredArgs) > 0:
            mx.log("Warning: The following options will be ignored by the VM because they come after the '-version' argument: " + ' '.join(ignoredArgs))
    return jdk.processArgs(argsPrefix + args, addDefaultArgs=addDefaultArgs)
def _bisect_benchmark(argv, bisect_id, email_to):
    """Bisect a benchmark regression between two commits and e-mail the result.

    Configuration comes either from the file named by the
    ``BISECT_BENCHMARK_CONFIG`` environment variable (INI section
    ``bisect-benchmark``) or from command-line arguments.  Builds and runs the
    benchmark at each probed commit via ``mx run`` and compares the value of
    the ``benchmark_criterion`` result line.

    :param argv: command-line arguments (used only without the env config)
    :param bisect_id: identifier forwarded to send_email
    :param email_to: recipient address for the final report
    """
    if 'BISECT_BENCHMARK_CONFIG' in os.environ:
        # Config-file mode: mirror the argparse namespace from an INI section.
        import configparser
        cp = configparser.ConfigParser()
        cp.read(os.environ['BISECT_BENCHMARK_CONFIG'])
        sec = cp['bisect-benchmark']
        args = types.SimpleNamespace()
        args.bad = sec['bad']
        args.good = sec['good']
        args.build_command = sec['build_command']
        args.benchmark_command = sec['benchmark_command']
        args.benchmark_criterion = sec.get('benchmark_criterion', 'BEST')
        args.enterprise = sec.getboolean('enterprise', False)
        args.no_clean = sec.getboolean('no_clean', False)
        args.rerun_with_commands = sec.get('rerun_with_commands')
    else:
        parser = argparse.ArgumentParser()
        parser.add_argument('bad', help="Bad commit for bisection")
        parser.add_argument('good', help="Good commit for bisection")
        parser.add_argument('build_command', help="Command to run in order to build the configuration")
        parser.add_argument('benchmark_command', help="Command to run in order to run the benchmark. Output needs to be in mx's format")
        parser.add_argument('--rerun-with-commands',
                            help="Re-run the bad and good commits with this benchmark command(s) "
                                 "(multiple commands separated by ';')")
        parser.add_argument('--benchmark-criterion', default='BEST', help="Which result parameter should be used for comparisons")
        parser.add_argument('--enterprise', action='store_true', help="Whether to checkout graal-enterprise")
        parser.add_argument('--no-clean', action='store_true', help="Do not run 'mx clean' between runs")
        args = parser.parse_args(argv)

    primary_suite = mx.primary_suite()

    def checkout_enterprise():
        # Sync graal-enterprise to the revision pinned by the ci-overlays repo.
        suite = get_suite('graalpython')
        ee_suite = get_suite('/vm-enterprise')
        overlays = '../ci-overlays'
        if not os.path.isdir(overlays):
            sys.exit("Needs to have ci-overlays checkout")
        with open(os.path.join(get_suite("graalpython").dir, "ci.jsonnet")) as f:
            overlay_rev = json.load(f)['overlay']
        suite.vc.update_to_branch(overlays, overlay_rev)
        constants_file = os.path.join(overlays, 'python/imported-constants.json')
        with open(constants_file) as f:
            ee_rev = json.load(f)['GRAAL_ENTERPRISE_REVISION']
        ee_suite.vc.update_to_branch(ee_suite.vc_dir, ee_rev)

    def checkout_suite(suite, commit):
        # Check out `commit` and force-sync suite imports (twice, so the
        # enterprise checkout in between cannot leave imports stale).
        suite.vc.update_to_branch(suite.vc_dir, commit)
        mx.run_mx(['sforceimports'], suite=suite)
        mx.run_mx(['--env', 'ce', 'sforceimports'], suite=get_suite('/vm'))
        if args.enterprise and suite.name != 'vm-enterprise':
            checkout_enterprise()
            # Make sure vm is imported before vm-enterprise
            get_suite('/vm')
            mx.run_mx(['--env', 'ee', 'sforceimports'], suite=get_suite('/vm-enterprise'))
        suite.vc.update_to_branch(suite.vc_dir, commit)
        mx.run_mx(['sforceimports'], suite=suite)
        debug_str = "debug: graalpython={} graal={}".format(
            get_commit(get_suite('graalpython')), get_commit(get_suite('/vm')))
        if args.enterprise:
            debug_str += " graal-enterprise={}".format(get_commit(get_suite('/vm-enterprise')))
        print(debug_str)

    def checkout_and_build_suite(suite, commit):
        # Checkout, optionally `... clean`, then run the configured build command.
        checkout_suite(suite, commit)
        build_command = shlex.split(args.build_command)
        if not args.no_clean:
            try:
                # Derive the clean command by replacing 'build' (and anything
                # after it) with 'clean'; ValueError means no 'build' token.
                clean_command = build_command[:build_command.index('build')] + ['clean']
                retcode = mx.run(clean_command, nonZeroIsFatal=False)
                if retcode:
                    print("Warning: clean command failed")
            except ValueError:
                pass
        retcode = mx.run(build_command, nonZeroIsFatal=False)
        if retcode:
            raise RuntimeError("Failed to execute the build command for {}".format(commit))

    def benchmark_callback(suite, commit, bench_command=args.benchmark_command):
        # Returns the numeric measurement for `commit`; with criterion
        # 'WORKS' only success/failure matters (0 good, sys.maxsize bad).
        checkout_and_build_suite(suite, commit)
        output = mx.OutputCapture()
        retcode = mx.run(shlex.split(bench_command), out=mx.TeeOutputCapture(output), nonZeroIsFatal=False)
        if retcode:
            if args.benchmark_criterion == 'WORKS':
                return sys.maxsize
            else:
                raise RuntimeError("Failed to execute benchmark for {}".format(commit))
        elif args.benchmark_criterion == 'WORKS':
            return 0
        match = re.search(r'{}.*duration: ([\d.]+)'.format(re.escape(args.benchmark_criterion)), output.data)
        if not match:
            raise RuntimeError("Failed to get result from the benchmark")
        return float(match.group(1))

    bad = get_commit(primary_suite, args.bad)
    good = get_commit(primary_suite, args.good)
    result = run_bisect_benchmark(primary_suite, bad, good, benchmark_callback)
    visualization = result.visualize()
    summary = result.summarize()
    print()
    print(visualization)
    print()
    print(summary)

    if args.rerun_with_commands:
        print('\n\nRerunning the good and bad commits with extra benchmark commands:')
        # Walk down nested (sub-suite) bisection results to the deepest level
        # that still has both a good and a bad commit.
        current_result = result
        current_suite = primary_suite
        while current_result.subresults and current_result.bad_index in current_result.subresults:
            downstream_suite = get_downstream_suite(current_suite)
            next_result = current_result.subresults[current_result.bad_index]
            if not next_result.good_commit or not next_result.bad_commit:
                print("Next downstream suite {} does not have both good and bad commits".format(downstream_suite.name))
                break
            print("Recursing to downstream suite: {}, commit: {}".format(downstream_suite.name, current_result.bad_commit))
            checkout_suite(current_suite, current_result.bad_commit)
            current_result = next_result
            current_suite = downstream_suite
        for commit in [current_result.good_commit, current_result.bad_commit]:
            print_line(80)
            print("Commit: {}".format(commit))
            checkout_and_build_suite(current_suite, commit)
            for cmd in args.rerun_with_commands.split(";"):
                print_line(40)
                mx.run(shlex.split(cmd.strip()), nonZeroIsFatal=False)

    send_email(
        bisect_id, email_to,
        "Bisection job has finished successfully.\n{}\n".format(summary) +
        "Note I'm just a script and I don't validate statistical significance of the above result.\n" +
        "Please take a moment to also inspect the detailed results below.\n\n{}\n\n".format(visualization) +
        os.environ.get('BUILD_URL', 'Unknown URL')
    )
def _build(args):
    """Delegate a build of the primary suite to mx, forwarding *args* verbatim."""
    primary = mx.primary_suite()
    primary.build(args)
def gate(args):
    """run the tests used to validate a push

    If this command exits with a 0 exit code, then the gate passed.

    Parses gate options, configures Task-level filtering (dry-run /
    task-filter / tags), then runs the ordered sequence of gate tasks
    (version info, style checks, builds, per-suite runners, jacoco report)
    and finally prints per-task timings.  Mutates the module-global
    ``_jacoco`` and class-level ``Task`` state.
    """
    parser = ArgumentParser(prog='mx gate')
    add_omit_clean_args(parser)
    parser.add_argument('--all-suites', action='store_true', help='run gate tasks for all suites, not just the primary suite')
    parser.add_argument('--dry-run', action='store_true', help='just show the tasks that will be run without running them')
    parser.add_argument('-x', action='store_true', help='makes --task-filter or --tags an exclusion instead of inclusion filter')
    parser.add_argument('--jacocout', help='specify the output directory for jacoco report')
    parser.add_argument('--strict-mode', action='store_true', help='abort if a task cannot be executed due to missing tool configuration')
    parser.add_argument('-B', dest='extra_build_args', action='append', metavar='<build_args>', help='append additional arguments to mx build commands used in the gate')
    # The three filtering modes are mutually exclusive.
    filtering = parser.add_mutually_exclusive_group()
    filtering.add_argument('-t', '--task-filter', help='comma separated list of substrings to select subset of tasks to be run')
    filtering.add_argument('-s', '--start-at', help='substring to select starting task')
    filtering.add_argument('--tags', help='comma separated list of tags to select subset of tasks to be run. Tags can have a range specifier `name[:from:[to]]`.'
                           'If present only the [from,to) tasks are executed. If `to` is omitted all tasks starting with `from` are executed.')
    for a, k in _extra_gate_arguments:
        parser.add_argument(*a, **k)
    args = parser.parse_args(args)
    cleanArgs = check_gate_noclean_arg(args)

    global _jacoco
    if args.dry_run:
        Task.dryRun = True
    if args.start_at:
        Task.startAtFilter = args.start_at
    elif args.task_filter:
        Task.filters = args.task_filter.split(',')
        Task.filtersExclude = args.x
    elif args.tags:
        parse_tags_argument(args.tags, args.x)
        Task.tagsExclude = args.x
        if not Task.tagsExclude:
            # implicitly include 'always'
            Task.tags += [Tags.always]
    elif args.x:
        mx.abort('-x option cannot be used without --task-filter or the --tags option')

    if not args.extra_build_args:
        args.extra_build_args = []

    tasks = []
    total = Task('Gate')
    try:
        with Task('Versions', tasks, tags=[Tags.always]) as t:
            if t:
                mx.command_function('version')(['--oneline'])
                mx.command_function('sversions')([])

        with Task('JDKReleaseInfo', tasks, tags=[Tags.always]) as t:
            if t:
                # Log the 'release' file of every configured JDK.
                jdkDirs = os.pathsep.join([mx.get_env('JAVA_HOME', ''), mx.get_env('EXTRA_JAVA_HOMES', '')])
                for jdkDir in jdkDirs.split(os.pathsep):
                    release = join(jdkDir, 'release')
                    if exists(release):
                        mx.log('==== ' + jdkDir + ' ====')
                        with open(release) as fp:
                            mx.log(fp.read().strip())

        for suiteRunner in _pre_gate_runners:
            suite, runner = suiteRunner
            if args.all_suites or suite is mx.primary_suite():
                runner(args, tasks)

        with Task('Pylint', tasks, tags=[Tags.style]) as t:
            if t:
                if mx.command_function('pylint')(['--primary']) != 0:
                    _warn_or_abort('Pylint not configured correctly. Cannot execute Pylint task.', args.strict_mode)

        gate_clean(cleanArgs, tasks, tags=[Tags.build, Tags.fullbuild])

        with Task('Distribution Overlap Check', tasks, tags=[Tags.style]) as t:
            if t:
                if mx.command_function('checkoverlap')([]) != 0:
                    t.abort('Found overlapping distributions.')

        with Task('Canonicalization Check', tasks, tags=[Tags.style]) as t:
            if t:
                mx.log(time.strftime('%d %b %Y %H:%M:%S - Ensuring mx/projects files are canonicalized...'))
                if mx.command_function('canonicalizeprojects')([]) != 0:
                    t.abort('Rerun "mx canonicalizeprojects" and modify the suite.py files as suggested.')

        with Task('Verify Java Sources in Project', tasks, tags=[Tags.style]) as t:
            if t:
                mx.log(time.strftime('%d %b %Y %H:%M:%S - Ensuring all Java sources are in a Java project directory...'))
                if mx.command_function('verifysourceinproject')([]) != 0:
                    t.abort('Move or delete the Java sources that are not in a Java project directory.')

        if mx._is_supported_by_jdt(mx.DEFAULT_JDK_TAG):
            # Build with Eclipse's compiler (and clean afterwards) only when
            # a JDT compiler jar is configured via the JDT env var.
            with Task('BuildWithEcj', tasks, tags=[Tags.fullbuild], legacyTitles=['BuildJavaWithEcj']) as t:
                if t:
                    if mx.get_env('JDT'):
                        mx.command_function('build')(['-p', '--warning-as-error'] + args.extra_build_args)
                        gate_clean(cleanArgs, tasks, name='CleanAfterEcjBuild', tags=[Tags.fullbuild])
                    else:
                        _warn_or_abort('JDT environment variable not set. Cannot execute BuildWithEcj task.', args.strict_mode)

        with Task('BuildWithJavac', tasks, tags=[Tags.build, Tags.fullbuild], legacyTitles=['BuildJavaWithJavac']) as t:
            if t:
                mx.command_function('build')(['-p', '--warning-as-error', '--force-javac'] + args.extra_build_args)

        with Task('IDEConfigCheck', tasks, tags=[Tags.fullbuild]) as t:
            if t:
                if args.cleanIDE:
                    mx.command_function('ideclean')([])
                    mx.command_function('ideinit')([])

        with Task('CodeFormatCheck', tasks, tags=[Tags.style]) as t:
            if t:
                eclipse_exe = mx.get_env('ECLIPSE_EXE')
                if eclipse_exe is not None:
                    if mx.command_function('eclipseformat')(['-e', eclipse_exe, '--primary']) != 0:
                        t.abort('Formatter modified files - run "mx eclipseformat", check in changes and repush')
                else:
                    _warn_or_abort('ECLIPSE_EXE environment variable not set. Cannot execute CodeFormatCheck task.', args.strict_mode)

        with Task('Checkstyle', tasks, tags=[Tags.style]) as t:
            if t and mx.command_function('checkstyle')(['--primary']) != 0:
                t.abort('Checkstyle warnings were found')

        with Task('Checkheaders', tasks, tags=[Tags.style]) as t:
            if t and mx.command_function('checkheaders')([]) != 0:
                t.abort('Checkheaders warnings were found')

        with Task('FindBugs', tasks, tags=[Tags.fullbuild]) as t:
            if t and mx.command_function('findbugs')([]) != 0:
                t.abort('FindBugs warnings were found')

        with Task('VerifyLibraryURLs', tasks, tags=[Tags.fullbuild]) as t:
            if t:
                mx.command_function('verifylibraryurls')([])

        if mx._primary_suite is mx._mx_suite:
            # TestJMH only runs when mx itself is the primary suite.
            with Task('TestJMH', tasks, tags=[Tags.fullbuild]) as t:
                if t:
                    mx_microbench.get_microbenchmark_executor().microbench(['--', '-foe', 'true', 'com.oracle.mxtool.bench.TestJMH'])

        # Start jacoco coverage from a clean slate.
        if exists('jacoco.exec'):
            os.unlink('jacoco.exec')

        if args.jacocout is not None:
            _jacoco = 'append'
        else:
            _jacoco = 'off'

        for suiteRunner in _gate_runners:
            suite, runner = suiteRunner
            if args.all_suites or suite is mx.primary_suite():
                runner(args, tasks)

        if args.jacocout is not None:
            mx.command_function('jacocoreport')([args.jacocout])
            _jacoco = 'off'
    except KeyboardInterrupt:
        total.abort(1)
    except BaseException as e:
        import traceback
        traceback.print_exc()
        total.abort(str(e))

    total.stop()
    mx.log('Gate task times:')
    for t in tasks:
        mx.log(' ' + str(t.duration) + '\t' + t.title + ("" if not (Task.verbose and t.tags) else (' [' + ','.join(t.tags) + ']')))
    mx.log(' =======')
    mx.log(' ' + str(total.duration))

    if args.task_filter:
        # Reset the class-level filter so later commands are unaffected.
        Task.filters = None
def _parse_fetchsettings(args):
    """Parse ``mx fetch-jdk`` command-line options into a settings dict.

    Resolves the configuration json location (explicit --configuration, the
    primary suite's vc dir, the cwd, or mx's own common.json), validates that
    the target install directory is writeable, and selects the JDK
    distribution to download.

    :param args: raw command-line argument list
    :return: dict with keys "keep-archive", "base-path", "java-distribution"
             and optionally "alias" / "strip-contents-home"
    :raises SystemExit: via mx.abort on unwritable target dir or missing config
    """
    settings = {}
    settings["keep-archive"] = False
    settings["base-path"] = default_base_path()

    common_location = join(_mx_home, 'common.json')

    parser = ArgumentParser(prog='mx fetch-jdk')
    parser.add_argument('--java-distribution', action='store', help='JDK distribution that should be downloaded (e.g., "labsjdk-ce-11" or "openjdk8")')
    parser.add_argument('--configuration', action='store', help='location of configuration json file (default: \'{}\')'.format(common_location))
    parser.add_argument('--to', action='store', help='location where JDK would be downloaded (default: \'{}\')'.format(settings["base-path"]))
    parser.add_argument('--alias', action='store', help='name of symlink to JDK')
    parser.add_argument('--keep-archive', action='store_true', help='keep downloaded JDK archive')
    if mx.is_darwin():
        # macOS-only: JDK archives carry a Contents/Home wrapper directory.
        parser.add_argument('--strip-contents-home', action='store_true', help='strip Contents/Home')
    parser.add_argument('remainder', nargs=REMAINDER, metavar='...')
    args = parser.parse_args(args)

    if args.to is not None:
        settings["base-path"] = args.to

    if not check_write_access(settings["base-path"]):
        mx.abort("JDK installation directory {} is not writeable.".format(settings["base-path"]) + os.linesep + "Either re-run with elevated privileges (e.g. sudo) or specify a writeable directory with the --to option.")

    if args.configuration is not None:
        common_location = args.configuration
    else:
        if mx.primary_suite() is not None:
            common_location = join(mx.primary_suite().vc_dir, 'common.json')  # Try fetching suite config
        else:
            common_location = join(os.getcwd(), 'common.json')  # Fallback to same folder
            if not exists(common_location):
                common_location = join(_mx_home, 'common.json')  # Fallback to mx
            mx.warn("Selected `{}` as configuration location, since no location is provided".format(common_location))

    if not exists(common_location):
        mx.abort("Configuration file doesn't exist")

    parse_common_json(common_location)

    if args.java_distribution is not None:
        settings["java-distribution"] = JdkDistribution.by_name(args.java_distribution)
    else:
        settings["java-distribution"] = JdkDistribution.choose_dist(is_quiet())

    if args.alias is not None:
        settings["alias"] = args.alias

    # NOTE(review): store_true flags default to False, never None, so this
    # condition is always true; the assignment is a no-op when the flag is
    # absent (settings["keep-archive"] is already False) — confirm intent.
    if args.keep_archive is not None:
        settings["keep-archive"] = args.keep_archive

    # NOTE(review): same always-true pattern — on darwin this always inserts
    # the key (with False when the flag is absent); verify whether consumers
    # test key presence or value.
    if mx.is_darwin() and (args.strip_contents_home is not None):
        settings["strip-contents-home"] = args.strip_contents_home

    return settings
def _run_tests(args, harness, vmLauncher, annotations, testfile, blacklist, whitelist, regex, suite):
    """Discover annotated test classes, filter them, and hand them to *harness*.

    Splits *args* into VM args and test name patterns, finds candidate test
    classes (from JAR distributions or binary deps + Java projects depending
    on suite compatibility), matches them against the requested patterns and
    the blacklist/whitelist/regex filters, writes the selected class names to
    *testfile* (one per line), and invokes
    ``harness(depsContainingTests, vmLauncher, vmArgs)``.

    NOTE: uses dict.iteritems(), i.e. this code is Python-2-only.
    """
    vmArgs, tests = mx.extract_VM_args(args)
    for t in tests:
        if t.startswith('-'):
            mx.abort('VM option ' + t + ' must precede ' + tests[0])

    # this is what should be used
    compat_suite = suite if suite else mx.primary_suite()
    if suite != mx._mx_suite and compat_suite.getMxCompatibility().useDistsForUnittest():
        jar_distributions = [d for d in mx.sorted_dists() if d.isJARDistribution() and exists(d.classpath_repr(resolve=False)) and (not suite or d.suite == suite)]
        # find a corresponding distribution for each test
        candidates = _find_classes_by_annotated_methods(annotations, jar_distributions, vmLauncher.jdk())
    else:
        # Legacy path: look in binary-suite JARs plus compiled Java projects.
        binary_deps = [d for d in mx.dependencies(opt_limit_to_suite=True) if d.isJARDistribution() and isinstance(d.suite, mx.BinarySuite) and (not suite or suite == d.suite)]
        candidates = _find_classes_by_annotated_methods(annotations, binary_deps, vmLauncher.jdk())
        for p in mx.projects(opt_limit_to_suite=True):
            if not p.isJavaProject():
                continue
            if suite and not p.suite == suite:
                continue
            if vmLauncher.jdk().javaCompliance < p.javaCompliance:
                continue
            for c in _find_classes_with_annotations(p, None, annotations):
                candidates[c] = p

    classes = []
    if len(tests) == 0:
        # No patterns: run everything that was discovered.
        classes = candidates.keys()
        depsContainingTests = set(candidates.values())
    else:
        depsContainingTests = set()
        found = False
        if len(tests) == 1 and '#' in tests[0]:
            # Single "Class#method" spec: exact class match preferred over substring.
            words = tests[0].split('#')
            if len(words) != 2:
                mx.abort("Method specification is class#method: " + tests[0])
            t, method = words
            for c, p in candidates.iteritems():
                # prefer exact matches first
                if t == c:
                    found = True
                    classes.append(c)
                    depsContainingTests.add(p)
            if not found:
                for c, p in candidates.iteritems():
                    if t in c:
                        found = True
                        classes.append(c)
                        depsContainingTests.add(p)
            if not found:
                mx.warn('no tests matched by substring: ' + t + ' (did you forget to run "mx build"?)')
            elif len(classes) != 1:
                # A method spec must resolve to exactly one class.
                mx.abort('More than one test matches substring {0} {1}'.format(t, classes))
            classes = [c + "#" + method for c in classes]
        else:
            for t in tests:
                if '#' in t:
                    mx.abort('Method specifications can only be used in a single test: ' + t)
                # NOTE(review): `found` is not reset per pattern, so once any
                # earlier pattern matched, later unmatched patterns produce no
                # warning — looks unintended; confirm before changing.
                for c, p in candidates.iteritems():
                    if t in c:
                        found = True
                        classes.append(c)
                        depsContainingTests.add(p)
                if not found:
                    mx.warn('no tests matched by substring: ' + t + ' (did you forget to run "mx build"?)')

    # Apply glob blacklist/whitelist and regex filters, in that order.
    if blacklist:
        classes = [c for c in classes if not any((glob.match(c) for glob in blacklist))]
    if whitelist:
        classes = [c for c in classes if any((glob.match(c) for glob in whitelist))]
    if regex:
        classes = [c for c in classes if re.search(regex, c)]

    if len(classes) != 0:
        # Write one class (or class#method) per line for the test harness.
        f_testfile = open(testfile, 'w')
        for c in classes:
            f_testfile.write(c + '\n')
        f_testfile.close()
        harness(depsContainingTests, vmLauncher, vmArgs)
def branch(self):
    """Return the active version-control branch of the primary suite.

    Falls back to "<unknown>" when the suite has no VC or the branch
    cannot be determined.
    """
    suite = mx.primary_suite()
    if suite.vc:
        active = suite.vc.active_branch(suite.dir, abortOnError=False)
        if active:
            return active
    return "<unknown>"
def _unittest_config_participant_tck(config):
    """Unittest config participant that wires TCK language providers onto the VM args.

    Collects, across the primary suite and all imported suites, the JAR
    distributions containing polyglot TCK LanguageProviders and Truffle
    languages, then splices them into the classpath (and, on a GraalVM JDK,
    into the truffle class path append and boot classpath) of the
    ``(vmArgs, mainClass, mainClassArgs)`` config tuple.

    :param config: (vmArgs, mainClass, mainClassArgs) triple
    :return: the updated (vmArgs, mainClass, mainClassArgs) triple
    """

    def find_path_arg(vmArgs, prefix):
        # Return (index, value-after-prefix) of the last vmArg containing
        # `prefix`, or (None, None) if absent.  Note: the last element of
        # vmArgs is never inspected (range(len - 1)).
        for index in reversed(range(len(vmArgs) - 1)):
            if prefix in vmArgs[index]:
                return index, vmArgs[index][len(prefix):]
        return None, None

    def create_filter(requiredResource):
        # Predicate factory: does this built JAR distribution contain the resource?
        def has_resource(dist):
            if dist.isJARDistribution() and exists(dist.path):
                with zipfile.ZipFile(dist.path, "r") as zf:
                    try:
                        zf.getinfo(requiredResource)
                    except KeyError:
                        return False
                    else:
                        return True
            else:
                return False
        return has_resource

    def import_visitor(suite, suite_import, predicate, collector, javaProperties, seenSuites, **extra_args):
        suite_collector(mx.suite(suite_import.name), predicate, collector, javaProperties, seenSuites)

    def suite_collector(suite, predicate, collector, javaProperties, seenSuites):
        # Depth-first over suite imports; `collector` is an ordered set
        # (OrderedDict with None values) of classpath entries.
        if suite.name in seenSuites:
            return
        seenSuites.add(suite.name)
        suite.visit_imports(import_visitor, predicate=predicate, collector=collector, javaProperties=javaProperties, seenSuites=seenSuites)
        for dist in suite.dists:
            if predicate(dist):
                for distCpEntry in mx.classpath_entries(dist):
                    if hasattr(distCpEntry, "getJavaProperties"):
                        for key, value in dist.getJavaProperties().items():
                            javaProperties[key] = value
                    if distCpEntry.isJdkLibrary() or distCpEntry.isJreLibrary():
                        cpPath = distCpEntry.classpath_repr(mx.get_jdk(), resolve=True)
                    else:
                        cpPath = distCpEntry.classpath_repr(resolve=True)
                    if cpPath:
                        collector[cpPath] = None

    javaPropertiesToAdd = OrderedDict()
    providers = OrderedDict()
    suite_collector(mx.primary_suite(), create_filter("META-INF/services/org.graalvm.polyglot.tck.LanguageProvider"), providers, javaPropertiesToAdd, set())
    languages = OrderedDict()
    suite_collector(mx.primary_suite(), create_filter("META-INF/truffle/language"), languages, javaPropertiesToAdd, set())
    # TCK instrumentation rides along with the languages.
    suite_collector(mx.primary_suite(), lambda dist: dist.isJARDistribution() and dist.name == "TRUFFLE_TCK_INSTRUMENTATION" and exists(dist.path), languages, javaPropertiesToAdd, set())
    vmArgs, mainClass, mainClassArgs = config
    cpIndex, cpValue = mx.find_classpath_arg(vmArgs)
    cpBuilder = OrderedDict()
    if cpValue:
        for cpElement in cpValue.split(os.pathsep):
            cpBuilder[cpElement] = None
    for providerCpElement in providers:
        cpBuilder[providerCpElement] = None

    if _is_graalvm(mx.get_jdk()):
        # On GraalVM: languages go on the truffle class path append, the TCK
        # common jars on the boot classpath (and are removed from the others).
        common = OrderedDict()
        suite_collector(mx.primary_suite(), lambda dist: dist.isJARDistribution() and dist.name == "TRUFFLE_TCK_COMMON" and exists(dist.path), common, javaPropertiesToAdd, set())
        tpIndex, tpValue = find_path_arg(vmArgs, '-Dtruffle.class.path.append=')
        tpBuilder = OrderedDict()
        if tpValue:
            for cpElement in tpValue.split(os.pathsep):
                tpBuilder[cpElement] = None
        for langCpElement in languages:
            tpBuilder[langCpElement] = None
        bpIndex, bpValue = find_path_arg(vmArgs, '-Xbootclasspath/a:')
        bpBuilder = OrderedDict()
        if bpValue:
            for cpElement in bpValue.split(os.pathsep):
                bpBuilder[cpElement] = None
        for bootCpElement in common:
            bpBuilder[bootCpElement] = None
            cpBuilder.pop(bootCpElement, None)
            tpBuilder.pop(bootCpElement, None)
        tpValue = '-Dtruffle.class.path.append=' + os.pathsep.join((e for e in tpBuilder))
        # NOTE(review): truthiness check means index 0 is treated as "absent"
        # and the arg gets appended instead of replaced — confirm intent.
        if tpIndex:
            vmArgs[tpIndex] = tpValue
        else:
            vmArgs.append(tpValue)
        bpValue = '-Xbootclasspath/a:' + os.pathsep.join((e for e in bpBuilder))
        if bpIndex:
            vmArgs[bpIndex] = bpValue
        else:
            vmArgs.append(bpValue)
    else:
        # Plain JDK: languages go on the regular classpath.
        for langCpElement in languages:
            cpBuilder[langCpElement] = None
    cpValue = os.pathsep.join((e for e in cpBuilder))
    if cpIndex:
        vmArgs[cpIndex] = cpValue
    else:
        vmArgs.append("-cp")
        vmArgs.append(cpValue)
    for key, value in javaPropertiesToAdd.items():
        vmArgs.append("-D" + key + "=" + value)
    return (vmArgs, mainClass, mainClassArgs)
def _unittest_config_participant_tck(config):
    """Unittest config participant that wires TCK language providers onto the VM args.

    NOTE(review): near-duplicate of an earlier ``_unittest_config_participant_tck``
    definition in this chunk (differs only in comparison spelling, e.g.
    ``"NAME" == dist.name``); if both live in the same module, this later
    definition shadows the earlier one — confirm these come from different files.

    :param config: (vmArgs, mainClass, mainClassArgs) triple
    :return: the updated (vmArgs, mainClass, mainClassArgs) triple
    """

    def find_path_arg(vmArgs, prefix):
        # Return (index, value-after-prefix) of the last vmArg containing
        # `prefix`, or (None, None) if absent.
        for index in reversed(range(len(vmArgs) - 1)):
            if prefix in vmArgs[index]:
                return index, vmArgs[index][len(prefix):]
        return None, None

    def create_filter(requiredResource):
        # Predicate factory: does this built JAR distribution contain the resource?
        def has_resource(dist):
            if dist.isJARDistribution() and exists(dist.path):
                with zipfile.ZipFile(dist.path, "r") as zf:
                    try:
                        zf.getinfo(requiredResource)
                    except KeyError:
                        return False
                    else:
                        return True
            else:
                return False
        return has_resource

    def import_visitor(suite, suite_import, predicate, collector, javaProperties, seenSuites, **extra_args):
        suite_collector(mx.suite(suite_import.name), predicate, collector, javaProperties, seenSuites)

    def suite_collector(suite, predicate, collector, javaProperties, seenSuites):
        # Depth-first over suite imports; `collector` is an ordered set
        # (OrderedDict with None values) of classpath entries.
        if suite.name in seenSuites:
            return
        seenSuites.add(suite.name)
        suite.visit_imports(import_visitor, predicate=predicate, collector=collector, javaProperties=javaProperties, seenSuites=seenSuites)
        for dist in suite.dists:
            if predicate(dist):
                for distCpEntry in mx.classpath_entries(dist):
                    if hasattr(distCpEntry, "getJavaProperties"):
                        for key, value in dist.getJavaProperties().items():
                            javaProperties[key] = value
                    if distCpEntry.isJdkLibrary() or distCpEntry.isJreLibrary():
                        cpPath = distCpEntry.classpath_repr(mx.get_jdk(), resolve=True)
                    else:
                        cpPath = distCpEntry.classpath_repr(resolve=True)
                    if cpPath:
                        collector[cpPath] = None

    javaPropertiesToAdd = OrderedDict()
    providers = OrderedDict()
    suite_collector(mx.primary_suite(), create_filter("META-INF/services/org.graalvm.polyglot.tck.LanguageProvider"), providers, javaPropertiesToAdd, set())
    languages = OrderedDict()
    suite_collector(mx.primary_suite(), create_filter("META-INF/truffle/language"), languages, javaPropertiesToAdd, set())
    # TCK instrumentation rides along with the languages.
    suite_collector(mx.primary_suite(), lambda dist: dist.isJARDistribution() and "TRUFFLE_TCK_INSTRUMENTATION" == dist.name and exists(dist.path), languages, javaPropertiesToAdd, set())
    vmArgs, mainClass, mainClassArgs = config
    cpIndex, cpValue = mx.find_classpath_arg(vmArgs)
    cpBuilder = OrderedDict()
    if cpValue:
        for cpElement in cpValue.split(os.pathsep):
            cpBuilder[cpElement] = None
    for providerCpElement in providers:
        cpBuilder[providerCpElement] = None

    if _is_graalvm(mx.get_jdk()):
        # On GraalVM: languages go on the truffle class path append, the TCK
        # common jars on the boot classpath (and are removed from the others).
        common = OrderedDict()
        suite_collector(mx.primary_suite(), lambda dist: dist.isJARDistribution() and "TRUFFLE_TCK_COMMON" == dist.name and exists(dist.path), common, javaPropertiesToAdd, set())
        tpIndex, tpValue = find_path_arg(vmArgs, '-Dtruffle.class.path.append=')
        tpBuilder = OrderedDict()
        if tpValue:
            for cpElement in tpValue.split(os.pathsep):
                tpBuilder[cpElement] = None
        for langCpElement in languages:
            tpBuilder[langCpElement] = None
        bpIndex, bpValue = find_path_arg(vmArgs, '-Xbootclasspath/a:')
        bpBuilder = OrderedDict()
        if bpValue:
            for cpElement in bpValue.split(os.pathsep):
                bpBuilder[cpElement] = None
        for bootCpElement in common:
            bpBuilder[bootCpElement] = None
            cpBuilder.pop(bootCpElement, None)
            tpBuilder.pop(bootCpElement, None)
        tpValue = '-Dtruffle.class.path.append=' + os.pathsep.join((e for e in tpBuilder))
        # NOTE(review): truthiness check means index 0 is treated as "absent"
        # and the arg gets appended instead of replaced — confirm intent.
        if tpIndex:
            vmArgs[tpIndex] = tpValue
        else:
            vmArgs.append(tpValue)
        bpValue = '-Xbootclasspath/a:' + os.pathsep.join((e for e in bpBuilder))
        if bpIndex:
            vmArgs[bpIndex] = bpValue
        else:
            vmArgs.append(bpValue)
    else:
        # Plain JDK: languages go on the regular classpath.
        for langCpElement in languages:
            cpBuilder[langCpElement] = None
    cpValue = os.pathsep.join((e for e in cpBuilder))
    if cpIndex:
        vmArgs[cpIndex] = cpValue
    else:
        vmArgs.append("-cp")
        vmArgs.append(cpValue)
    for key, value in javaPropertiesToAdd.items():
        vmArgs.append("-D" + key + "=" + value)
    return (vmArgs, mainClass, mainClassArgs)