Example #1
File: mx_jvmci.py Project: mearvk/JVM
def c1visualizer(args):
    """run the Cl Compiler Visualizer"""
    libpath = join(_suite.dir, 'lib')
    if mx.get_os() == 'windows':
        executable = join(libpath, 'c1visualizer', 'bin', 'c1visualizer.exe')
    else:
        executable = join(libpath, 'c1visualizer', 'bin', 'c1visualizer')

    # Check whether the current C1Visualizer installation is up-to-date
    if exists(executable) and not exists(mx.library('C1VISUALIZER_DIST').get_path(resolve=False)):
        mx.log('Updating C1Visualizer')
        shutil.rmtree(join(libpath, 'c1visualizer'))

    archive = mx.library('C1VISUALIZER_DIST').get_path(resolve=True)

    if not exists(executable):
        zf = zipfile.ZipFile(archive, 'r')
        zf.extractall(libpath)

    if not exists(executable):
        mx.abort('C1Visualizer binary does not exist: ' + executable)

    if mx.get_os() != 'windows':
        # Make sure that execution is allowed. The zip file does not always specify that correctly
        os.chmod(executable, 0777)

    mx.run([executable])
Example #2
def checkheaders(args):
    """check Java source headers against any required pattern"""
    failures = {}
    for p in mx.projects():
        if not p.isJavaProject():
            continue

        csConfig = join(mx.project(p.checkstyleProj).dir, '.checkstyle_checks.xml')
        if not exists(csConfig):
            mx.log('Cannot check headers for ' + p.name + ' - ' + csConfig + ' does not exist')
            continue
        dom = xml.dom.minidom.parse(csConfig)
        for module in dom.getElementsByTagName('module'):
            if module.getAttribute('name') == 'RegexpHeader':
                for prop in module.getElementsByTagName('property'):
                    if prop.getAttribute('name') == 'header':
                        value = prop.getAttribute('value')
                        matcher = re.compile(value, re.MULTILINE)
                        for sourceDir in p.source_dirs():
                            for root, _, files in os.walk(sourceDir):
                                for name in files:
                                    if name.endswith('.java') and name != 'package-info.java':
                                        f = join(root, name)
                                        with open(f) as fp:
                                            content = fp.read()
                                        if not matcher.match(content):
                                            failures[f] = csConfig
    for n, v in failures.iteritems():
        mx.log('{0}: header does not match RegexpHeader defined in {1}'.format(n, v))
    return len(failures)
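A standalone sketch of the header matching used above: the RegexpHeader value is compiled with re.MULTILINE and checked with match(), so the pattern must appear at the very start of the file content (the header pattern and file contents below are invented for illustration).

import re

# hypothetical RegexpHeader value, as it might appear in .checkstyle_checks.xml
header_pattern = r'/\*\n \* Copyright \(c\) \d{4}, Oracle and/or its affiliates\.'
matcher = re.compile(header_pattern, re.MULTILINE)

good = '/*\n * Copyright (c) 2016, Oracle and/or its affiliates.\n */\npackage p;\n'
bad = 'package p;\n'

# match() only succeeds when the header is at the beginning of the file
print(bool(matcher.match(good)))  # True
print(bool(matcher.match(bad)))   # False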
Example #3
File: mx_fastr.py Project: chumer/fastr
def testgen(args):
    '''generate the expected output for unit tests, and All/Failing test classes'''
    parser = ArgumentParser(prog='r testgen')
    parser.add_argument('--tests', action='store', default=_all_unit_tests(), help='pattern to match test classes')
    args = parser.parse_args(args)
    # check we are in the home directory
    if os.getcwd() != _fastr_suite.dir:
        mx.abort('must run rtestgen from FastR home directory')
    # check the version of GnuR against FastR
    try:
        fastr_version = subprocess.check_output([mx.get_jdk().java, '-cp', mx.classpath('com.oracle.truffle.r.runtime'), 'com.oracle.truffle.r.runtime.RVersionNumber'])
        gnur_version = subprocess.check_output(['R', '--version'])
        if not gnur_version.startswith(fastr_version):
            mx.abort('R version is incompatible with FastR, please update to ' + fastr_version)
    except subprocess.CalledProcessError:
        mx.abort('RVersionNumber.main failed')
    # clean the test project to invoke the test analyzer annotation processor (AP)
    testOnly = ['--projects', 'com.oracle.truffle.r.test']
    mx.clean(['--no-dist', ] + testOnly)
    mx.build(testOnly)
    # now just invoke junit with the appropriate options
    mx.log("generating expected output for packages: ")
    for pkg in args.tests.split(','):
        mx.log("    " + str(pkg))
    junit(['--tests', args.tests, '--gen-expected-output', '--gen-expected-quiet'])
Example #4
def native_image_layout(dists, subdir, native_image_root, debug_gr_8964=False):
    if not dists:
        return
    dest_path = join(native_image_root, subdir)
    # Cleanup leftovers from previous call
    if exists(dest_path):
        if debug_gr_8964:
            mx.log('[mx_substratevm.native_image_layout: remove_tree: ' + dest_path + ']')
        remove_tree(dest_path)
    mkpath(dest_path)
    # Create symlinks to conform with native-image directory layout scheme
    def symlink_jar(jar_path):
        if debug_gr_8964:
            def log_stat(prefix, file_name):
                file_stat = os.stat(file_name)
                mx.log('    ' + prefix + '.st_mode: ' + oct(file_stat.st_mode))
                mx.log('    ' + prefix + '.st_mtime: ' + time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(file_stat.st_mtime)))

            dest_jar = join(dest_path, basename(jar_path))
            mx.log('[mx_substratevm.native_image_layout.symlink_jar: symlink_or_copy')
            mx.log('  src: ' + jar_path)
            log_stat('src', jar_path)
            mx.log('  dst: ' + dest_jar)
            symlink_or_copy(jar_path, dest_jar, debug_gr_8964)
            log_stat('dst', dest_jar)
            mx.log(']')
        else:
            symlink_or_copy(jar_path, join(dest_path, basename(jar_path)), debug_gr_8964)

    for dist in dists:
        mx.logv('Add ' + type(dist).__name__ + ' ' + str(dist) + ' to ' + dest_path)
        symlink_jar(dist.path)
        if not dist.isBaseLibrary() and dist.sourcesPath:
            symlink_jar(dist.sourcesPath)
Example #5
def verify_jvmci_ci_versions(args=None, extraVMarguments=None):
    version_pattern = re.compile(r'^(?!\s*#).*jvmci-(?P<version>\d*\.\d*)')

    def _grep_version(files, msg):
        version = None
        last = None
        linenr = 0
        for filename in files:
            for line in open(filename):
                m = version_pattern.search(line)
                if m:
                    new_version = m.group('version')
                    if version and version != new_version:
                        mx.abort(
                            os.linesep.join([
                                "Multiple JVMCI versions found in {0} files:".format(msg),
                                "  {0} in {1}:{2}:    {3}".format(version, *last),
                                "  {0} in {1}:{2}:    {3}".format(new_version, filename, linenr, line),
                            ]))
                    last = (filename, linenr, line.rstrip())
                    version = new_version
                linenr = linenr + 1
        if not version:
            mx.abort("No JVMCI version found in {0} files!".format(msg))
        return version

    hocon_version = _grep_version(glob.glob(join(mx.primary_suite().dir, 'ci*.hocon')) + glob.glob(join(mx.primary_suite().dir, 'ci*/*.hocon')), 'ci.hocon')
    travis_version = _grep_version(glob.glob('.travis.yml'), 'TravisCI')
    if hocon_version != travis_version:
        mx.abort("Travis and ci.hocon JVMCI versions do not match: {0} vs. {1}".format(travis_version, hocon_version))
    mx.log('JVMCI versions are ok!')
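A standalone illustration of how version_pattern behaves: the (?!\s*#) lookahead rejects commented lines and the version comes from the named group (the sample lines are invented).

import re

version_pattern = re.compile(r'^(?!\s*#).*jvmci-(?P<version>\d*\.\d*)')

samples = [
    'jdk8 : {name : labsjdk, version : "8u121-jvmci-0.25"}',    # hypothetical ci.hocon line
    '# jdk8 : {name : labsjdk, version : "8u121-jvmci-0.24"}',  # commented out, ignored
]
for line in samples:
    m = version_pattern.search(line)
    print(m.group('version') if m else None)  # prints 0.25, then None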
Example #6
def jaotc_test(args):
    """run (acceptance) tests for the AOT compiler (jaotc)"""
    all_tests = ['HelloWorld', 'java.base', 'javac']
    parser = ArgumentParser(prog='mx jaotc-test')
    parser.add_argument("--list", default=None, action="store_true", help="Print the list of available jaotc tests.")
    parser.add_argument('tests', help='tests to run (omit to run all tests)', nargs=ZERO_OR_MORE)
    args = parser.parse_args(args)

    if args.list:
        print "The following jaotc tests are available:\n"
        for name in all_tests:
            print "  " + name
        return

    tests = args.tests or all_tests
    for test in tests:
        mx.log('Testing `{}`'.format(test))
        if test == 'HelloWorld':
            test_class(
                classpath=mx.classpath('JAOTC_TEST'),
                main_class='jdk.tools.jaotc.test.HelloWorld'
            )
        elif test == 'javac':
            test_javac('jdk.tools.jaotc')
        elif test == 'java.base':
            test_modules(
                classpath=mx.project('jdk.tools.jaotc.test').output_dir(),
                main_class='jdk.tools.jaotc.test.HelloWorld',
                modules=['java.base']
            )
        else:
            mx.abort('Unknown jaotc test: {}'.format(test))
Example #7
def _run_netbeans_app(app_name, env=None, args=None):
    args = [] if args is None else args
    dist = app_name.upper() + '_DIST'
    name = app_name.lower()
    extractPath = join(_suite.get_output_root())
    if mx.get_os() == 'windows':
        executable = join(extractPath, name, 'bin', name + '.exe')
    else:
        executable = join(extractPath, name, 'bin', name)

    # Check whether the current installation is up-to-date
    if exists(executable) and not exists(mx.library(dist).get_path(resolve=False)):
        mx.log('Updating ' + app_name)
        shutil.rmtree(join(extractPath, name))

    archive = mx.library(dist).get_path(resolve=True)

    if not exists(executable):
        zf = zipfile.ZipFile(archive, 'r')
        zf.extractall(extractPath)

    if not exists(executable):
        mx.abort(app_name + ' binary does not exist: ' + executable)

    if mx.get_os() != 'windows':
        # Make sure that execution is allowed. The zip file does not always specify that correctly
        os.chmod(executable, 0777)
    mx.run([executable]+args, env=env)
Example #8
    def parseVmArgs(self, args, addDefaultArgs=True):
        args = mx.expand_project_in_args(args, insitu=False)
        jacocoArgs = mx_gate.get_jacoco_agent_args()
        if jacocoArgs:
            args = jacocoArgs + args

        args = ['-Xbootclasspath/p:' + dep.classpath_repr() for dep in _jvmci_bootclasspath_prepends] + args

        # Remove JVMCI jars from class path. They are only necessary when
        # compiling with a javac from JDK8 or earlier.
        cpIndex, cp = mx.find_classpath_arg(args)
        if cp:
            excluded = frozenset([dist.path for dist in _suite.dists])
            cp = os.pathsep.join([e for e in cp.split(os.pathsep) if e not in excluded])
            args[cpIndex] = cp

        jvmciModeArgs = _jvmciModes[_vm.jvmciMode]
        if jvmciModeArgs:
            bcpDeps = [jdkDist.dist() for jdkDist in jdkDeployedDists]
            if bcpDeps:
                args = ['-Xbootclasspath/p:' + os.pathsep.join([d.classpath_repr() for d in bcpDeps])] + args

        # Set the default JVMCI compiler
        for jdkDist in reversed(jdkDeployedDists):
            assert isinstance(jdkDist, JvmciJDKDeployedDist), jdkDist
            if jdkDist._compilers:
                jvmciCompiler = jdkDist._compilers[-1]
                args = ['-Djvmci.compiler=' + jvmciCompiler] + args
                break

        if '-version' in args:
            ignoredArgs = args[args.index('-version') + 1:]
            if  len(ignoredArgs) > 0:
                mx.log("Warning: The following options will be ignored by the vm because they come after the '-version' argument: " + ' '.join(ignoredArgs))
        return self.processArgs(args, addDefaultArgs=addDefaultArgs)
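The class-path pruning above is a plain split/filter/join on os.pathsep; a minimal standalone sketch with made-up paths:

import os

cp = os.pathsep.join(['/deps/junit.jar', '/dists/JVMCI_API.jar', '/deps/asm.jar'])
excluded = frozenset(['/dists/JVMCI_API.jar'])  # hypothetical distribution paths
cp = os.pathsep.join([e for e in cp.split(os.pathsep) if e not in excluded])
print(cp)  # /deps/junit.jar:/deps/asm.jar on Unix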
Example #9
    def __init__(self, title, tasks=None, disableJacoco=False):
        self.tasks = tasks
        self.title = title
        self.skipped = False
        if tasks is not None:
            for t in tasks:
                if t.title == title:
                    mx.abort('Gate task with title "' + title + '" is already defined')

            if Task.startAtFilter:
                assert not Task.filters
                if Task.startAtFilter in title:
                    self.skipped = False
                    Task.startAtFilter = None
                else:
                    self.skipped = True
            elif Task.filters:
                if Task.filtersExclude:
                    self.skipped = any([f in title for f in Task.filters])
                else:
                    self.skipped = not any([f in title for f in Task.filters])
        if not self.skipped:
            self.start = time.time()
            self.end = None
            self.duration = None
            self.disableJacoco = disableJacoco
            mx.log(time.strftime('gate: %d %b %Y %H:%M:%S: BEGIN: ') + title)
Example #10
def _create_jdk_bundle(jdkBuildDir, debugLevel, jdkImageDir):
    """
    Creates a tar.gz JDK archive and an accompanying tar.gz.sha1 file containing its
    SHA1 checksum, plus symlinks to the archive for non-canonical architecture names.
    """

    arches = _get_jdk_bundle_arches()
    jdkTgzPath = join(_suite.get_output_root(), 'jdk-bundles', 'jdk9-{}-{}-{}.tar.gz'.format(debugLevel, _get_openjdk_os(), arches[0]))
    with mx.Archiver(jdkTgzPath, kind='tgz') as arc:
        mx.log('Creating ' + jdkTgzPath)
        for root, _, filenames in os.walk(jdkImageDir):
            for name in filenames:
                f = join(root, name)
                arcname = 'jdk1.9.0/' + os.path.relpath(f, jdkImageDir)
                arc.zf.add(name=f, arcname=arcname, recursive=False)

    with open(jdkTgzPath + '.sha1', 'w') as fp:
        mx.log('Creating ' + jdkTgzPath + '.sha1')
        fp.write(mx.sha1OfFile(jdkTgzPath))

    def _create_link(source, link_name):
        if exists(link_name):
            os.remove(link_name)
        mx.log('Creating ' + link_name + ' -> ' + source)
        os.symlink(source, link_name)

    for arch in arches[1:]:
        link_name = join(_suite.get_output_root(), 'jdk-bundles', 'jdk9-{}-{}-{}.tar.gz'.format(debugLevel, _get_openjdk_os(), arch))
        jdkTgzName = os.path.basename(jdkTgzPath)
        _create_link(jdkTgzName, link_name)
        _create_link(jdkTgzName + '.sha1', link_name + '.sha1')
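The .sha1 sidecar written next to the bundle is simply the hex digest of the archive contents. A self-contained sketch of that pattern with hashlib (the bundle name is hypothetical and the dummy file is created only for the demo):

import hashlib

def sha1_of_file(path):
    # hex SHA1 digest of a file's contents, mirroring what the .sha1 sidecar stores
    d = hashlib.sha1()
    with open(path, 'rb') as fp:
        for block in iter(lambda: fp.read(1024 * 1024), b''):
            d.update(block)
    return d.hexdigest()

bundle = 'dummy-bundle.tar.gz'  # hypothetical archive, created here just for the demo
with open(bundle, 'wb') as fp:
    fp.write(b'not a real JDK bundle')
with open(bundle + '.sha1', 'w') as fp:
    fp.write(sha1_of_file(bundle))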
Example #11
    def parseVmArgs(self, args, addDefaultArgs=True):
        args = mx.expand_project_in_args(args, insitu=False)
        jacocoArgs = mx_gate.get_jacoco_agent_args()
        if jacocoArgs:
            args = jacocoArgs + args

        args = ['-Xbootclasspath/p:' + dep.classpath_repr() for dep in _jvmci_bootclasspath_prepends] + args

        jvmciModeArgs = _jvmciModes[_vm.jvmciMode]
        if jvmciModeArgs:
            bcpDeps = [jdkDist.dist() for jdkDist in jdkDeployedDists]
            if bcpDeps:
                args = ['-Xbootclasspath/p:' + os.pathsep.join([d.classpath_repr() for d in bcpDeps])] + args

        # Set the default JVMCI compiler
        for jdkDist in reversed(jdkDeployedDists):
            assert isinstance(jdkDist, JvmciJDKDeployedDist), jdkDist
            if jdkDist._compilers:
                jvmciCompiler = jdkDist._compilers[-1]
                args = ['-Djvmci.compiler=' + jvmciCompiler] + args
                break

        if '-version' in args:
            ignoredArgs = args[args.index('-version') + 1:]
            if  len(ignoredArgs) > 0:
                mx.log("Warning: The following options will be ignored by the vm because they come after the '-version' argument: " + ' '.join(ignoredArgs))
        return self.processArgs(args, addDefaultArgs=addDefaultArgs)
Example #12
File: mx_gate.py Project: djoooooe/mx
def checkheaders(args):
    """check Java source headers against any required pattern"""
    failures = {}
    for p in mx.projects():
        if not p.isJavaProject():
            continue

        csConfig = join(mx.project(p.checkstyleProj).dir, ".checkstyle_checks.xml")
        if not exists(csConfig):
            mx.log("Cannot check headers for " + p.name + " - " + csConfig + " does not exist")
            continue
        dom = xml.dom.minidom.parse(csConfig)
        for module in dom.getElementsByTagName("module"):
            if module.getAttribute("name") == "RegexpHeader":
                for prop in module.getElementsByTagName("property"):
                    if prop.getAttribute("name") == "header":
                        value = prop.getAttribute("value")
                        matcher = re.compile(value, re.MULTILINE)
                        for sourceDir in p.source_dirs():
                            for root, _, files in os.walk(sourceDir):
                                for name in files:
                                    if name.endswith(".java") and name != "package-info.java":
                                        f = join(root, name)
                                        with open(f) as fp:
                                            content = fp.read()
                                        if not matcher.match(content):
                                            failures[f] = csConfig
    for n, v in failures.iteritems():
        mx.log("{0}: header does not match RegexpHeader defined in {1}".format(n, v))
    return len(failures)
Example #13
def js_image_test(binary, bench_location, name, warmup_iterations, iterations, timeout=None, bin_args=None):
    bin_args = bin_args if bin_args is not None else []
    jsruncmd = [binary] + bin_args + [join(bench_location, 'harness.js'), '--', join(bench_location, name + '.js'),
                                      '--', '--warmup-iterations=' + str(warmup_iterations),
                                      '--iterations=' + str(iterations)]
    mx.log(' '.join(jsruncmd))

    passing = []

    stdoutdata = []
    def stdout_collector(x):
        stdoutdata.append(x)
        mx.log(x.rstrip())
    stderrdata = []
    def stderr_collector(x):
        stderrdata.append(x)
        mx.warn(x.rstrip())

    returncode = mx.run(jsruncmd, cwd=bench_location, out=stdout_collector, err=stderr_collector, nonZeroIsFatal=False, timeout=timeout)

    if returncode == mx.ERROR_TIMEOUT:
        print('INFO: TIMEOUT (> %d): %s' % (timeout, name))
    elif returncode >= 0:
        matches = 0
        for line in stdoutdata:
            if re.match(r'^\S+: *\d+(\.\d+)?\s*$', line):
                matches += 1
        if matches > 0:
            passing = stdoutdata

    if not passing:
        mx.abort('JS benchmark ' + name + ' failed')
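The pass/fail decision above hinges on the result-line regex: only lines shaped like '<name>: <number>' count as matches. A standalone check with invented output lines:

import re

result_line = re.compile(r'^\S+: *\d+(\.\d+)?\s*$')
lines = ['Richards: 1042', 'DeltaBlue: 512.3', 'warming up...', 'error: out of memory']
print([l for l in lines if result_line.match(l)])  # ['Richards: 1042', 'DeltaBlue: 512.3']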
Example #14
    def benchmark(self, mxBenchmarkArgs, bmSuiteArgs):
        """Run a benchmark suite."""
        parser = ArgumentParser(prog="mx benchmark", description=benchmark.__doc__)
        parser.add_argument("benchmark", help="Benchmark to run, format: <suite>:<benchmark>.")
        parser.add_argument(
            "--results-file", default="bench-results.json", help="Path to JSON output file with benchmark results."
        )
        parser.add_argument("--machine-name", default=None, help="Abstract name of the target machine.")
        mxBenchmarkArgs = parser.parse_args(mxBenchmarkArgs)

        self.checkEnvironmentVars()

        suite, benchNamesList = self.getSuiteAndBenchNames(mxBenchmarkArgs)

        results = []

        failures_seen = False
        suite.before(bmSuiteArgs)
        for benchnames in benchNamesList:
            suite.validateEnvironment()
            try:
                partialResults = self.execute(suite, benchnames, mxBenchmarkArgs, bmSuiteArgs)
                results.extend(partialResults)
            except RuntimeError:
                failures_seen = True
                mx.log(traceback.format_exc())

        topLevelJson = {"queries": results}
        dump = json.dumps(topLevelJson)
        with open(mxBenchmarkArgs.results_file, "w") as txtfile:
            txtfile.write(dump)
        if failures_seen:
            return 1
        return 0
Example #15
 def run(self, cwd, args):
     out = mx.TeeOutputCapture(mx.OutputCapture())
     args = self.post_process_command_line_args(args)
     mx.log("Running JVM with args: {0}".format(args))
     code = self.run_java(args, out=out, err=out, cwd=cwd, nonZeroIsFatal=False)
     out = out.underlying.data
     dims = self.dimensions(cwd, args, code, out)
     return code, out, dims
Example #16
File: mx_unittest.py Project: charig/mx
def unittest(args):
    """run the JUnit tests"""

    parser = ArgumentParser(prog='mx unittest',
          description='run the JUnit tests',
          add_help=False,
          formatter_class=RawDescriptionHelpFormatter,
          epilog=unittestHelpSuffix,
        )
    parser.add_argument('--blacklist', help='run all testcases not specified in <file>', metavar='<file>')
    parser.add_argument('--whitelist', help='run testcases specified in <file> only', metavar='<file>')
    parser.add_argument('--verbose', help='enable verbose JUnit output', action='store_true')
    parser.add_argument('--very-verbose', help='enable very verbose JUnit output', action='store_true')
    parser.add_argument('--fail-fast', help='stop after first JUnit test class that has a failure', action='store_true')
    parser.add_argument('--enable-timing', help='enable JUnit test timing (requires --verbose/--very-verbose)', action='store_true')
    parser.add_argument('--regex', help='run only testcases matching a regular expression', metavar='<regex>')
    parser.add_argument('--color', help='enable color output', action='store_true')
    parser.add_argument('--gc-after-test', help='force a GC after each test', action='store_true')
    parser.add_argument('--suite', help='run only the unit tests in <suite>', metavar='<suite>')
    parser.add_argument('--repeat', help='run the unit tests <n> times', type=is_strictly_positive)
    eagerStacktrace = parser.add_mutually_exclusive_group()
    eagerStacktrace.add_argument('--eager-stacktrace', action='store_const', const=True, dest='eager_stacktrace', help='print test errors as they occur (default)')
    eagerStacktrace.add_argument('--no-eager-stacktrace', action='store_const', const=False, dest='eager_stacktrace', help='print test errors after all tests have run')

    ut_args = []
    delimiter = False
    # check for delimiter
    while len(args) > 0:
        arg = args.pop(0)
        if arg == '--':
            delimiter = True
            break
        ut_args.append(arg)

    if delimiter:
        # all arguments before '--' must be recognized
        parsed_args = parser.parse_args(ut_args)
    else:
        # parse all known arguments
        parsed_args, args = parser.parse_known_args(ut_args)

    if parsed_args.whitelist:
        try:
            with open(parsed_args.whitelist) as fp:
                parsed_args.whitelist = [re.compile(fnmatch.translate(l.rstrip())) for l in fp.readlines() if not l.startswith('#')]
        except IOError:
            mx.log('warning: could not read whitelist: ' + parsed_args.whitelist)
    if parsed_args.blacklist:
        try:
            with open(parsed_args.blacklist) as fp:
                parsed_args.blacklist = [re.compile(fnmatch.translate(l.rstrip())) for l in fp.readlines() if not l.startswith('#')]
        except IOError:
            mx.log('warning: could not read blacklist: ' + parsed_args.blacklist)
    if parsed_args.eager_stacktrace is None:
        parsed_args.eager_stacktrace = True

    _unittest(args, ['@Test', '@Parameters'], **parsed_args.__dict__)
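Whitelist and blacklist entries are glob patterns converted to regexes with fnmatch.translate, one per non-comment line. A small standalone illustration (the patterns and class names are made up):

import fnmatch
import re

lines = ['# tests disabled on this platform', 'com.oracle.*.HelloWorldTest', 'org.example.Slow*']
patterns = [re.compile(fnmatch.translate(l.rstrip())) for l in lines if not l.startswith('#')]
print(any(p.match('com.oracle.truffle.api.HelloWorldTest') for p in patterns))  # True
print(any(p.match('org.example.FastTest') for p in patterns))                   # False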
Example #17
def deploy_binary_if_truffle_head(args):
    """If the active branch is 'truffle-head', deploy binaries for the primary suite to remote maven repository."""
    primary_branch = 'truffle-head'
    active_branch = mx.VC.get_vc(_suite.dir).active_branch(_suite.dir)
    if active_branch == primary_branch:
        return mx.command_function('deploy-binary')(args)
    else:
        mx.log('The active branch is "%s". Binaries are deployed only if the active branch is "%s".' % (active_branch, primary_branch))
        return 0
Example #18
File: mx_unittest.py Project: dougxc/mx
def unittest(args):
    """run the JUnit tests"""

    parser = ArgumentParser(
        prog="mx unittest",
        description="run the JUnit tests",
        add_help=False,
        formatter_class=RawDescriptionHelpFormatter,
        epilog=unittestHelpSuffix,
    )
    parser.add_argument("--blacklist", help="run all testcases not specified in <file>", metavar="<file>")
    parser.add_argument("--whitelist", help="run testcases specified in <file> only", metavar="<file>")
    parser.add_argument("--verbose", help="enable verbose JUnit output", action="store_true")
    parser.add_argument("--fail-fast", help="stop after first JUnit test class that has a failure", action="store_true")
    parser.add_argument("--enable-timing", help="enable JUnit test timing", action="store_true")
    parser.add_argument("--regex", help="run only testcases matching a regular expression", metavar="<regex>")
    parser.add_argument("--color", help="enable color output", action="store_true")
    parser.add_argument("--eager-stacktrace", help="print stacktrace eagerly", action="store_true")
    parser.add_argument("--gc-after-test", help="force a GC after each test", action="store_true")
    parser.add_argument("--suite", help="run only the unit tests in <suite>", metavar="<suite>")

    ut_args = []
    delimiter = False
    # check for delimiter
    while len(args) > 0:
        arg = args.pop(0)
        if arg == "--":
            delimiter = True
            break
        ut_args.append(arg)

    if delimiter:
        # all arguments before '--' must be recognized
        parsed_args = parser.parse_args(ut_args)
    else:
        # parse all known arguments
        parsed_args, args = parser.parse_known_args(ut_args)

    if parsed_args.whitelist:
        try:
            with open(parsed_args.whitelist) as fp:
                parsed_args.whitelist = [
                    re.compile(fnmatch.translate(l.rstrip())) for l in fp.readlines() if not l.startswith("#")
                ]
        except IOError:
            mx.log("warning: could not read whitelist: " + parsed_args.whitelist)
    if parsed_args.blacklist:
        try:
            with open(parsed_args.blacklist) as fp:
                parsed_args.blacklist = [
                    re.compile(fnmatch.translate(l.rstrip())) for l in fp.readlines() if not l.startswith("#")
                ]
        except IOError:
            mx.log("warning: could not read blacklist: " + parsed_args.blacklist)

    _unittest(args, ["@Test", "@Parameters"], **parsed_args.__dict__)
Example #19
File: mx_jruby.py Project: grddev/jruby
 def build(self):
     cwd = _suite.dir
     maven_repo_arg, env = mavenSetup()
     mx.log("Building jruby-core with Maven")
     mx.run_maven(['-q', '-DskipTests', maven_repo_arg, '-pl', 'core,lib'], cwd=cwd, env=env)
     # Install Bundler
     gem_home = join(_suite.dir, 'lib', 'ruby', 'gems', 'shared')
     env['GEM_HOME'] = gem_home
     env['GEM_PATH'] = gem_home
     mx.run(['bin/jruby', 'bin/gem', 'install', 'bundler', '-v', '1.10.6'], cwd=cwd, env=env)
Example #20
def build(args, vm=None):
    if any([opt in args for opt in ['-h', '--help']]):
        orig_command_build(args, vm)

    mx.log('build: Checking SubstrateVM requirements for building ...')

    if not _host_os_supported():
        mx.abort('build: SubstrateVM can be built only on Darwin, Linux and Windows platforms')

    orig_command_build(args, vm)
Example #21
 def build(self):
     cwd = _suite.dir
     maven_repo_arg, env = mavenSetup()
     mx.log("Building jruby-core with Maven")
     quiet = [] if mx.get_opts().verbose else ['-q']
     mx.run_maven(quiet + ['-DskipTests', maven_repo_arg, '-Dcreate.sources.jar', '-pl', 'core,lib'], cwd=cwd, env=env)
     # Install Bundler
     gem_home = join(_suite.dir, 'lib', 'ruby', 'gems', 'shared')
     env['GEM_HOME'] = gem_home
     env['GEM_PATH'] = gem_home
     mx.run(['bin/jruby', 'bin/gem', 'install', 'bundler', '-v', '1.10.6'], cwd=cwd, env=env)
Example #22
def makejdk(args):
    """create a JDK directory based on the Maxine VM

    Create a JDK directory by replicating the file structure of $JAVA_HOME
    and replacing the 'java' executable with the Maxine VM
    executable. This produces a Maxine VM based JDK for applications
    (such as NetBeans) which expect a certain directory structure
    and executable names in a JDK installation."""

    if mx.os == 'darwin':
        mx.log('mx makejdk is not supported on Darwin')
        mx.abort(1)

    if len(args) == 0:
        maxjdk = join(_maxine_home, 'maxjdk')
    else:
        maxjdk = args[0]
        if maxjdk[0] != '/':
            maxjdk = join(os.getcwd(), maxjdk)

    if exists(maxjdk):
        mx.log('The destination directory already exists -- it will be deleted')
        shutil.rmtree(maxjdk)

    jdk = mx.java().jdk
    if not isdir(jdk):
        mx.log(jdk + " does not exist or is not a directory")
        mx.abort(1)

    mx.log('Replicating ' + jdk + ' in ' + maxjdk + '...')
    shutil.copytree(jdk, maxjdk, symlinks=True)

    jreExists = exists(join(maxjdk, 'jre'))

    for f in os.listdir(_vmdir):
        fpath = join(_vmdir, f)
        if isfile(fpath):
            shutil.copy(fpath, join(maxjdk, 'bin'))
            if jreExists:
                shutil.copy(fpath, join(maxjdk, 'jre', 'bin'))

    os.unlink(join(maxjdk, 'bin', 'java'))
    if jreExists:
        os.unlink(join(maxjdk, 'jre', 'bin', 'java'))
    if (mx.os == 'windows'):
        shutil.copy(join(maxjdk, 'bin', 'maxvm'), join(maxjdk, 'bin', 'java'))
        shutil.copy(join(maxjdk, 'jre', 'bin', 'maxvm'), join(maxjdk, 'jre', 'bin', 'java'))
    else:
        os.symlink(join(maxjdk, 'bin', 'maxvm'), join(maxjdk, 'bin', 'java'))
        if jreExists:
            os.symlink(join(maxjdk, 'jre', 'bin', 'maxvm'), join(maxjdk, 'jre', 'bin', 'java'))

    mx.log('Created Maxine based JDK in ' + maxjdk)
Example #23
File: mx_gate.py Project: djoooooe/mx
 def stop(self):
     self.end = time.time()
     self.duration = datetime.timedelta(seconds=self.end - self.start)
     mx.log(
         time.strftime("gate: %d %b %Y %H:%M:%S: END:   ")
         + self.title
         + " ["
         + str(self.duration)
         + "]"
         + Task._diskstats()
     )
     return self
Example #24
def _parseVmArgs(jdk, args, addDefaultArgs=True):
    args = mx.expand_project_in_args(args, insitu=False)
    jacocoArgs = mx_gate.get_jacoco_agent_args()
    if jacocoArgs:
        args = jacocoArgs + args

    # Support for -G: options
    def translateGOption(arg):
        if arg.startswith("-G:+"):
            if "=" in arg:
                mx.abort("Mixing + and = in -G: option specification: " + arg)
            arg = "-Dgraal." + arg[len("-G:+") :] + "=true"
        elif arg.startswith("-G:-"):
            if "=" in arg:
                mx.abort("Mixing - and = in -G: option specification: " + arg)
            arg = "-Dgraal." + arg[len("-G:+") :] + "=false"
        elif arg.startswith("-G:"):
            if "=" not in arg:
                mx.abort('Missing "=" in non-boolean -G: option specification: ' + arg)
            arg = "-Dgraal." + arg[len("-G:") :]
        return arg

    args = map(translateGOption, args)

    if "-G:+PrintFlags" in args and "-Xcomp" not in args:
        mx.warn("Using -G:+PrintFlags may have no effect without -Xcomp as Graal initialization is lazy")

    bcp = [mx.distribution("truffle:TRUFFLE_API").classpath_repr()]
    if _jvmciModes[_vm.jvmciMode]:
        bcp.extend([d.get_classpath_repr() for d in _bootClasspathDists])

    args = ["-Xbootclasspath/p:" + os.pathsep.join(bcp)] + args

    # Remove JVMCI from class path. It's only there to support compilation.
    cpIndex, cp = mx.find_classpath_arg(args)
    if cp:
        jvmciLib = mx.library("JVMCI").path
        cp = os.pathsep.join([e for e in cp.split(os.pathsep) if e != jvmciLib])
        args[cpIndex] = cp

    # Set the default JVMCI compiler
    jvmciCompiler = _compilers[-1]
    args = ["-Djvmci.compiler=" + jvmciCompiler] + args

    if "-version" in args:
        ignoredArgs = args[args.index("-version") + 1 :]
        if len(ignoredArgs) > 0:
            mx.log(
                "Warning: The following options will be ignored by the vm because they come after the '-version' argument: "
                + " ".join(ignoredArgs)
            )
    return jdk.processArgs(args, addDefaultArgs=addDefaultArgs)
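The -G: handling above is a pure string rewrite; a standalone sketch of the same mapping with the mixed +/= error checks omitted (option names other than PrintFlags are invented):

def translate_g_option(arg):
    # -G:+Name -> -Dgraal.Name=true, -G:-Name -> -Dgraal.Name=false, -G:Name=Value -> -Dgraal.Name=Value
    if arg.startswith('-G:+'):
        return '-Dgraal.' + arg[len('-G:+'):] + '=true'
    if arg.startswith('-G:-'):
        return '-Dgraal.' + arg[len('-G:-'):] + '=false'
    if arg.startswith('-G:'):
        return '-Dgraal.' + arg[len('-G:'):]
    return arg

print(translate_g_option('-G:+PrintFlags'))        # -Dgraal.PrintFlags=true
print(translate_g_option('-G:-InlineEverything'))  # -Dgraal.InlineEverything=false
print(translate_g_option('-G:Dump=Hypothetical'))  # -Dgraal.Dump=Hypothetical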
Example #25
File: mx_gate.py Project: djoooooe/mx
 def abort(self, codeOrMessage):
     self.end = time.time()
     self.duration = datetime.timedelta(seconds=self.end - self.start)
     mx.log(
         time.strftime("gate: %d %b %Y %H:%M:%S: ABORT: ")
         + self.title
         + " ["
         + str(self.duration)
         + "]"
         + Task._diskstats()
     )
     mx.abort(codeOrMessage)
     return self
Example #26
def _parseVmArgs(jdk, args, addDefaultArgs=True):
    args = mx.expand_project_in_args(args, insitu=False)
    jacocoArgs = mx_gate.get_jacoco_agent_args()
    if jacocoArgs:
        args = jacocoArgs + args

    # Support for -G: options
    def translateGOption(arg):
        if arg.startswith('-G:+'):
            if '=' in arg:
                mx.abort('Mixing + and = in -G: option specification: ' + arg)
            arg = '-Dgraal.' + arg[len('-G:+'):] + '=true'
        elif arg.startswith('-G:-'):
            if '=' in arg:
                mx.abort('Mixing - and = in -G: option specification: ' + arg)
            arg = '-Dgraal.' + arg[len('-G:+'):] + '=false'
        elif arg.startswith('-G:'):
            if '=' not in arg:
                mx.abort('Missing "=" in non-boolean -G: option specification: ' + arg)
            arg = '-Dgraal.' + arg[len('-G:'):]
        return arg
    # add default graal.options.file and translate -G: options
    options_file = join(mx.primary_suite().dir, 'graal.options')
    options_file_arg = ['-Dgraal.options.file=' + options_file] if exists(options_file) else []
    args = options_file_arg + map(translateGOption, args)

    if '-G:+PrintFlags' in args and '-Xcomp' not in args:
        mx.warn('Using -G:+PrintFlags may have no effect without -Xcomp as Graal initialization is lazy')

    bcp = [mx.distribution('truffle:TRUFFLE_API').classpath_repr()]
    if _jvmciModes[_vm.jvmciMode]:
        bcp.extend([d.get_classpath_repr() for d in _bootClasspathDists])

    args = ['-Xbootclasspath/p:' + os.pathsep.join(bcp)] + args

    # Remove JVMCI from class path. It's only there to support compilation.
    cpIndex, cp = mx.find_classpath_arg(args)
    if cp:
        jvmciLib = mx.library('JVMCI').path
        cp = os.pathsep.join([e for e in cp.split(os.pathsep) if e != jvmciLib])
        args[cpIndex] = cp

    # Set the default JVMCI compiler
    jvmciCompiler = _compilers[-1]
    args = ['-Djvmci.Compiler=' + jvmciCompiler] + args

    if '-version' in args:
        ignoredArgs = args[args.index('-version') + 1:]
        if  len(ignoredArgs) > 0:
            mx.log("Warning: The following options will be ignored by the vm because they come after the '-version' argument: " + ' '.join(ignoredArgs))
    return jdk.processArgs(args, addDefaultArgs=addDefaultArgs)
Example #27
def unittest(args, vmLauncher=None):
    """run the JUnit tests (all testcases)"""

    parser = ArgumentParser(prog='mx unittest',
          description='run the JUnit tests',
          add_help=False,
          formatter_class=RawDescriptionHelpFormatter,
          epilog=unittestHelpSuffix,
        )
    parser.add_argument('--blacklist', help='run all testcases not specified in <file>', metavar='<file>')
    parser.add_argument('--whitelist', help='run testcases specified in <file> only', metavar='<file>')
    parser.add_argument('--verbose', help='enable verbose JUnit output', action='store_true')
    parser.add_argument('--fail-fast', help='stop after first JUnit test class that has a failure', action='store_true')
    parser.add_argument('--enable-timing', help='enable JUnit test timing', action='store_true')
    parser.add_argument('--regex', help='run only testcases matching a regular expression', metavar='<regex>')
    parser.add_argument('--color', help='enable color output', action='store_true')
    parser.add_argument('--eager-stacktrace', help='print stacktrace eagerly', action='store_true')
    parser.add_argument('--gc-after-test', help='force a GC after each test', action='store_true')

    ut_args = []
    delimiter = False
    # check for delimiter
    while len(args) > 0:
        arg = args.pop(0)
        if arg == '--':
            delimiter = True
            break
        ut_args.append(arg)

    if delimiter:
        # all arguments before '--' must be recognized
        parsed_args = parser.parse_args(ut_args)
    else:
        # parse all known arguments
        parsed_args, args = parser.parse_known_args(ut_args)

    if parsed_args.whitelist:
        try:
            with open(parsed_args.whitelist) as fp:
                parsed_args.whitelist = [re.compile(fnmatch.translate(l.rstrip())) for l in fp.readlines() if not l.startswith('#')]
        except IOError:
            mx.log('warning: could not read whitelist: ' + parsed_args.whitelist)
    if parsed_args.blacklist:
        try:
            with open(parsed_args.blacklist) as fp:
                parsed_args.blacklist = [re.compile(fnmatch.translate(l.rstrip())) for l in fp.readlines() if not l.startswith('#')]
        except IOError:
            mx.log('warning: could not read blacklist: ' + parsed_args.blacklist)

    _unittest(args, ['@Test', '@Parameters'], vmLauncher=vmLauncher, **parsed_args.__dict__)
Example #28
File: mx_jvmci.py Project: mearvk/JVM
def _runmake(args):
    """run the JDK make process

To build hotspot and import it into the JDK: "mx make hotspot import-hotspot"
{0}"""

    jdkBuildDir = _get_jdk_build_dir()
    if not exists(jdkBuildDir):
        # JDK9 must be bootstrapped with a JDK8
        compliance = mx.JavaCompliance('8')
        jdk8 = mx.get_jdk(compliance.exactMatch, versionDescription=compliance.value)
        cmd = ['sh', 'configure', '--with-debug-level=' + _vm.debugLevel, '--with-native-debug-symbols=external', '--disable-precompiled-headers', '--with-jvm-features=graal',
               '--with-jvm-variants=' + _vm.jvmVariant, '--disable-warnings-as-errors', '--with-boot-jdk=' + jdk8.home, '--with-jvm-features=graal']
        mx.run(cmd, cwd=_jdkSourceRoot)
    cmd = [mx.gmake_cmd(), 'CONF=' + _vm.debugLevel]
    if mx.get_opts().verbose:
        cmd.append('LOG=debug')
    cmd.extend(args)
    if mx.get_opts().use_jdk_image and 'images' not in args:
        cmd.append('images')

    if not mx.get_opts().verbose:
        mx.log('--------------- make execution ----------------------')
        mx.log('Working directory: ' + _jdkSourceRoot)
        mx.log('Command line: ' + ' '.join(cmd))
        mx.log('-----------------------------------------------------')

    mx.run(cmd, cwd=_jdkSourceRoot)
Example #29
def verify_jvmci_ci_versions(args):
    """
    Checks that the jvmci versions used in various ci files agree.

    If the ci.hocon files use a -dev version, it allows the travis ones to use the previous version.
    For example, if ci.hocon uses jvmci-0.24-dev, travis may use either jvmci-0.24-dev or jvmci-0.23
    """
    version_pattern = re.compile(r'^(?!\s*#).*jvmci-(?P<version>\d*\.\d*)(?P<dev>-dev)?')

    def _grep_version(files, msg):
        version = None
        dev = None
        last = None
        linenr = 0
        for filename in files:
            for line in open(filename):
                m = version_pattern.search(line)
                if m:
                    new_version = m.group('version')
                    new_dev = bool(m.group('dev'))
                    if (version and version != new_version) or (dev is not None and dev != new_dev):
                        mx.abort(
                            os.linesep.join([
                                "Multiple JVMCI versions found in {0} files:".format(msg),
                                "  {0} in {1}:{2}:    {3}".format(version + ('-dev' if dev else ''), *last),
                                "  {0} in {1}:{2}:    {3}".format(new_version + ('-dev' if new_dev else ''), filename, linenr, line),
                            ]))
                    last = (filename, linenr, line.rstrip())
                    version = new_version
                    dev = new_dev
                linenr += 1
        if not version:
            mx.abort("No JVMCI version found in {0} files!".format(msg))
        return version, dev

    hocon_version, hocon_dev = _grep_version(glob.glob(join(mx.primary_suite().dir, 'ci*.hocon')) + glob.glob(join(mx.primary_suite().dir, 'ci*/*.hocon')), 'ci.hocon')
    travis_version, travis_dev = _grep_version(glob.glob('.travis.yml'), 'TravisCI')

    if hocon_version != travis_version or hocon_dev != travis_dev:
        versions_ok = False
        if not travis_dev and hocon_dev:
            next_travis_version = [int(a) for a in travis_version.split('.')]
            next_travis_version[-1] += 1
            next_travis_version_str = '.'.join((str(a) for a in next_travis_version))
            if next_travis_version_str == hocon_version:
                versions_ok = True
        if not versions_ok:
            mx.abort("Travis and ci.hocon JVMCI versions do not match: {0} vs. {1}".format(travis_version + ('-dev' if travis_dev else ''), hocon_version + ('-dev' if hocon_dev else '')))
    mx.log('JVMCI versions are ok!')
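The allowance described in the docstring (a -dev ci.hocon version lets Travis stay one release behind) reduces to bumping the last component of the Travis version and comparing. A standalone sketch of that rule:

def travis_version_ok(travis_version, travis_dev, hocon_version, hocon_dev):
    # exact match, or ci.hocon is the -dev successor of the Travis release
    if travis_version == hocon_version and travis_dev == hocon_dev:
        return True
    if hocon_dev and not travis_dev:
        parts = [int(a) for a in travis_version.split('.')]
        parts[-1] += 1
        return '.'.join(str(a) for a in parts) == hocon_version
    return False

print(travis_version_ok('0.23', False, '0.24', True))  # True
print(travis_version_ok('0.23', False, '0.25', True))  # False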
Example #30
def configs(arg):
    """prints the predefined image configurations"""
    c = _configs()
    mx.log('The available preconfigured option sets are:')
    mx.log()
    mx.log('    Configuration    Expansion')
    for k, v in sorted(c.iteritems()):
        mx.log('    @{0:<16} {1}'.format(k, v.replace('@', ' ')))
Example #31
File: mx_fastr.py Project: iCodeIN/fastr
def _fastr_gate_runner(args, tasks):
    with mx_gate.Task('Setup no specials',
                      tasks,
                      tags=[FastRGateTags.no_specials]) as t:
        if t:
            os.environ['FASTR_OPTION_UseSpecials'] = 'false'

    with mx_gate.Task('Setup no dsl cache',
                      tasks,
                      tags=[FastRGateTags.no_dsl_cache]) as t:
        if t:
            os.environ['FASTR_OPTION_DSLCacheSizeFactor'] = '0'

    with mx_gate.Task('SetupLLVM', tasks, tags=[FastRGateTags.llvm]) as t:
        if t:
            os.environ['FASTR_RFFI'] = 'llvm'

    with mx_gate.Task('GCTorture1', tasks,
                      tags=[FastRGateTags.gc_torture1]) as t:
        if t:
            os.environ['FASTR_GCTORTURE'] = '1'

    with mx_gate.Task('GCTorture3', tasks,
                      tags=[FastRGateTags.gc_torture3]) as t:
        if t:
            os.environ['FASTR_GCTORTURE'] = '3'

    with mx_gate.Task('VerySlowAsserts',
                      tasks,
                      tags=[FastRGateTags.very_slow_asserts]) as t:
        if t:
            os.environ['FASTR_TEST_VERY_SLOW_ASSERTS'] = 'true'
    '''
    The specific additional gate tasks provided by FastR.
    '''
    with mx_gate.Task('ExtSoftVersions', tasks,
                      tags=[mx_gate.Tags.always]) as t:
        if t:
            new_env = os.environ.copy()
            new_env['R_DEFAULT_PACKAGES'] = 'base'
            run_r(['-q', '-e', 'extSoftVersion()'], 'R', env=new_env)

    with mx_gate.Task('LibsInfo', tasks, tags=[mx_gate.Tags.always]) as t:
        if t:
            mx.log("Libraries captured in FASTR_HOME/lib:")
            lib_dir = os.path.join(_fastr_suite.dir, 'lib')
            ldd = ['otool', '-L'] if platform.system() == 'Darwin' else ['ldd']
            for f in os.listdir(lib_dir):
                full_path = os.path.join(lib_dir, f)
                mx.run(['file', full_path], nonZeroIsFatal=False)
                mx.log('---\nobjdump:')
                mx.run(['objdump', '-s', '--section', '.comment', full_path],
                       nonZeroIsFatal=False)
                mx.log('---\nlinking info:')
                mx.run(ldd + [full_path], nonZeroIsFatal=False)
                mx.log('---------')

    # ---------------------------------
    # Style checks:

    # FastR has custom copyright check
    with mx_gate.Task('Copyright check', tasks,
                      tags=[mx_gate.Tags.style]) as t:
        if t:
            if mx.checkcopyrights(['--primary']) != 0:
                t.abort('copyright errors')

    # check that the expected test output file is up to date
    with mx_gate.Task('UnitTests: ExpectedTestOutput file check',
                      tasks,
                      tags=[mx_gate.Tags.style]) as t:
        if t:
            mx_unittest.unittest([
                '-Dfastr.test.gen.expected=' +
                _test_srcdir(), '-Dfastr.test.check.expected=true'
            ] + _gate_unit_tests())

    # ----------------------------------
    # Basic tests:

    with mx_gate.Task(
            'UnitTests',
            tasks,
            tags=[FastRGateTags.basic_tests, FastRGateTags.unit_tests]) as t:
        if t:
            mx_unittest.unittest(_gate_noapps_unit_tests())

    with mx_gate.Task('Rembedded', tasks,
                      tags=[FastRGateTags.basic_tests]) as t:
        if t:
            if rembedtest([]) != 0:
                t.abort("Rembedded tests failed")

    # ----------------------------------
    # Package tests:

    with mx_gate.Task('Recommended load test',
                      tasks,
                      tags=[FastRGateTags.recommended_load]) as t:
        if t:
            # Note: this is a convenience mx gate job for testing the loading of recommended packages
            # We also test the loading of recommended pkgs in the "graalvm-tests"
            if not os.path.exists(
                    os.path.join(_fastr_suite.dir, 'library', 'spatial')):
                mx.abort(
                    'Recommended packages seem to be not installed in FastR. Did you forget to build with FASTR_RELEASE=true?'
                )
            pkgs = [
                'codetools', 'MASS', 'boot', 'class', 'cluster', 'lattice',
                'nnet', 'spatial', 'Matrix', 'KernSmooth', 'foreign', 'nlme',
                'rpart', 'survival'
            ]
            # Creates code that looks like: require(codetools) && require(MASS) && ...
            require_stmts = ' && '.join(
                ['require(' + pkg + ')' for pkg in pkgs])
            test_load = 'if (!(' + require_stmts + ')) q(status=1) else q(status=42)'
            if run_r(['--vanilla', '-e', test_load], 'R',
                     nonZeroIsFatal=False) != 42:
                mx.abort("Loading of recommended packages failed")

    with mx_gate.Task('Internal pkg test',
                      tasks,
                      tags=[FastRGateTags.internal_pkgs_test]) as t:
        if t:
            internal_pkg_tests()

    # CRAN packages are listed in files com.oracle.truffle.r.test.packages/gated0, gated1, ...
    # We loop over all such files and create a gate task for each of them
    # See also documentation in FastRGateTags.cran_pkgs_tests
    for i in range(1, 1000):
        list_file = os.path.join(
            _fastr_suite.dir,
            'com.oracle.truffle.r.test.packages/gated' + str(i))
        if not os.path.exists(list_file):
            break
        with mx_gate.Task('CRAN pkg test: ' + str(i),
                          tasks,
                          tags=[FastRGateTags.cran_pkgs_test + str(i)]) as t:
            if t:
                check_last = False if mx_gate.Task.tags is None else FastRGateTags.cran_pkgs_test_check_last in mx_gate.Task.tags  # pylint: disable=unsupported-membership-test
                if check_last:
                    next_file = os.path.join(
                        _fastr_suite.dir,
                        'com.oracle.truffle.r.test.packages/gated' +
                        str(i + 1))
                    if os.path.exists(next_file):
                        mx.abort(
                            "File %s exists, but the gate thinks that %s is the last file. Did you forget to update the gate configuration?"
                            % (next_file, list_file))
                cran_pkg_tests(list_file)
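The recommended-packages check above assembles a single R expression; a standalone sketch of that string construction (package list shortened):

pkgs = ['codetools', 'MASS', 'boot']
require_stmts = ' && '.join(['require(' + pkg + ')' for pkg in pkgs])
test_load = 'if (!(' + require_stmts + ')) q(status=1) else q(status=42)'
print(test_load)
# if (!(require(codetools) && require(MASS) && require(boot))) q(status=1) else q(status=42)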
Example #32
def _mxrun(args, cwd=_suite.dir, verbose=False, out=None):
    if verbose:
        mx.log('Running \'{}\''.format(' '.join(args)))
    status = mx.run(args, nonZeroIsFatal=False, cwd=cwd, out=out)
    if status:
        mx.abort(status)
Example #33
 def stdout_collector(x):
     stdoutdata.append(x)
     mx.log(x.rstrip())
Example #34
 def log_stat(prefix, file_name):
     file_stat = os.stat(file_name)
     mx.log('    ' + prefix + '.st_mode: ' + oct(file_stat.st_mode))
     mx.log('    ' + prefix + '.st_mtime: ' + time.strftime(
         '%Y-%m-%dT%H:%M:%SZ', time.gmtime(file_stat.st_mtime)))
Example #35
    def build(self):
        if not os.environ.has_key('FASTR_RELEASE'):
            mx.log('FastR: set FASTR_RELEASE to update release project')
            return
        # copy the release directories
        output_dir = self.subject.dir
        fastr_dir = mx_fastr._fastr_suite.dir
        for d in ['bin', 'include', 'library', 'etc', 'share', 'doc']:
            target_dir = join(output_dir, d)
            if os.path.exists(target_dir):
                shutil.rmtree(target_dir)
            shutil.copytree(join(fastr_dir, d), target_dir)

        lib_fastr_dir = join(fastr_dir, 'lib')
        lib_output_dir = join(output_dir, 'lib')
        if os.path.exists(lib_output_dir):
            shutil.rmtree(lib_output_dir)
        os.mkdir(lib_output_dir)
        for f in os.listdir(lib_fastr_dir):
            source_file = join(lib_fastr_dir, f)
            target_file = join(lib_output_dir, f)
            if f != '.DS_Store':
                if os.path.islink(source_file):
                    os.symlink(os.readlink(source_file), target_file)
                else:
                    shutil.copy(source_file, target_file)

        # copyrights
        copyrights_dir = join(fastr_dir, 'mx.fastr', 'copyrights')
        with open(join(output_dir, 'COPYRIGHT'), 'w') as outfile:
            for copyright_file in os.listdir(copyrights_dir):
                basename = os.path.basename(copyright_file)
                if basename.endswith('copyright.star'):
                    with open(join(copyrights_dir, copyright_file)) as infile:
                        data = infile.read()
                        outfile.write(data)
        # license/README
        shutil.copy(join(fastr_dir, 'LICENSE'), output_dir)
        shutil.copy(join(fastr_dir, 'README.md'), output_dir)

        # canonicalize R_HOME_DIR in bin/R
        bin_dir = join(output_dir, 'bin')
        rcmd = join(bin_dir, 'R')
        # R is the generic shell script (taken essentially verbatim from GNU R)
        with open(rcmd) as f:
            lines = f.readlines()
        with open(rcmd, 'w') as f:
            for line in lines:
                if line.startswith('R_HOME_DIR='):
                    f.write(
"""
source="${BASH_SOURCE[0]}"
while [ -h "$source" ] ; do
  prev_source="$source"
  source="$(readlink "$source")";
  if [[ "$source" != /* ]]; then
    # if the link was relative, it was relative to where it came from
    dir="$( cd -P "$( dirname "$prev_source" )" && pwd )"
    source="$dir/$source"
  fi
done
r_bin="$( cd -P "$( dirname "$source" )" && pwd )"
R_HOME_DIR="$( dirname "$r_bin" )"
""")
                elif line.strip() == "#!/bin/sh":
                    f.write("#!/usr/bin/env bash\n")
                else:
                    f.write(line)
        # jar files for the launchers
        jars_dir = join(bin_dir, 'fastr_jars')
        if not os.path.exists(jars_dir):
            os.mkdir(jars_dir)
        fastr_classfiles = dict()

        # visitor to collect/copy all the classes/jar files needed by the launchers
        def dep_visit(dep, edge):
            if isinstance(dep, mx.JARDistribution):
                shutil.copy(join(dep.suite.dir, dep.path), jars_dir)
            elif isinstance(dep, mx.Library):
                if not dep.name.lower() == 'jdk_tools':
                    jar_name = dep.name.lower() + '.jar'
                    shutil.copyfile(join(dep.suite.dir, dep.path), join(jars_dir, jar_name))
            elif isinstance(dep, mx.JavaProject):
                if 'com.oracle.truffle.r' in dep.name:
                    classfiles_dir = dep.output_dir()
                    for root, _, classfiles in os.walk(classfiles_dir):
                        for classfile in classfiles:
                            fastr_classfiles[os.path.relpath(join(root, classfile), classfiles_dir)] = join(root, classfile)

        self.subject.walk_deps(visit=dep_visit)

        # create the fastr.jar file
        with mx.Archiver(join(jars_dir, 'fastr.jar')) as arc:
            arc.zf.writestr("META-INF/MANIFEST.MF", "Manifest-Version: 1.0\n")
            for arcname, path in fastr_classfiles.iteritems():
                with open(path, 'r') as f:
                    contents = f.read()
                arc.zf.writestr(arcname, contents)

        # create the classpath string
        classpath = []
        for _, _, jars in os.walk(jars_dir):
            for jar in jars:
                classpath.append(join("$R_HOME/bin/fastr_jars", jar))
        classpath_string = ":".join(classpath)

        # replace the mx exec scripts with native Java launchers, setting the classpath from above
        bin_exec_dir = join(bin_dir, 'exec')
        r_launcher = join(self.subject.dir, 'src', 'R_launcher')
        template_dict = {'CLASSPATH': classpath_string}
        self._template(r_launcher, join(bin_exec_dir, 'R'), template_dict)
        shutil.rmtree(join(bin_dir, 'execRextras'))
        rscript_launcher = join(self.subject.dir, 'src', 'Rscript_launcher')
        self._template(rscript_launcher, join(bin_dir, 'Rscript'), template_dict)
Example #36
 def git_log_filtering_strategy(self):
     mx.log('Commits strategy from GraalVM')
     git_log_strategy = ['git', 'log', '--first-parent']
     return git_log_strategy
Example #37
    def build(self):
        if 'FASTR_RELEASE' not in os.environ:
            mx.log('FastR: set FASTR_RELEASE to update release project')
            return
        # copy the release directories
        output_dir = self.subject.dir
        fastr_dir = _fastr_suite.dir
        for d in ['bin', 'include', 'library', 'etc', 'share', 'doc']:
            target_dir = join(output_dir, d)
            if os.path.exists(target_dir):
                shutil.rmtree(target_dir)
            shutil.copytree(join(fastr_dir, d), target_dir)

        lib_fastr_dir = join(fastr_dir, 'lib')
        lib_output_dir = join(output_dir, 'lib')
        if os.path.exists(lib_output_dir):
            shutil.rmtree(lib_output_dir)
        os.mkdir(lib_output_dir)
        for f in os.listdir(lib_fastr_dir):
            source_file = join(lib_fastr_dir, f)
            target_file = join(lib_output_dir, f)
            if f != '.DS_Store':
                if os.path.islink(source_file):
                    os.symlink(os.readlink(source_file), target_file)
                else:
                    shutil.copy(source_file, target_file)

        # copyrights
        copyrights_dir = join(fastr_dir, 'mx.fastr', 'copyrights')
        with open(join(output_dir, 'COPYRIGHT'), 'w') as outfile:
            for copyright_file in os.listdir(copyrights_dir):
                if basename(copyright_file).endswith('copyright.star'):
                    with open(join(copyrights_dir, copyright_file)) as infile:
                        data = infile.read()
                        outfile.write(data)
        # license/README
        shutil.copy(join(fastr_dir, 'LICENSE'), output_dir)
        shutil.copy(join(fastr_dir, 'README.md'), output_dir)

        # canonicalize R_HOME_DIR in bin/R
        bin_dir = join(output_dir, 'bin')
        rcmd = join(bin_dir, 'R')
        # R is the generic shell script (taken essentially verbatim from GNU R)
        with open(rcmd) as f:
            lines = f.readlines()
        with open(rcmd, 'w') as f:
            for line in lines:
                if line.startswith('R_HOME_DIR='):
                    f.write("""
source="${BASH_SOURCE[0]}"
while [ -h "$source" ] ; do
  prev_source="$source"
  source="$(readlink "$source")";
  if [[ "$source" != /* ]]; then
    # if the link was relative, it was relative to where it came from
    dir="$( cd -P "$( dirname "$prev_source" )" && pwd )"
    source="$dir/$source"
  fi
done
r_bin="$( cd -P "$( dirname "$source" )" && pwd )"
R_HOME_DIR="$( dirname "$r_bin" )"
""")
                elif line.strip() == "#!/bin/sh":
                    f.write("#!/usr/bin/env bash\n")
                else:
                    f.write(line)
        # jar files for the launchers
        jars_dir = join(bin_dir, 'fastr_jars')
        if not os.path.exists(jars_dir):
            os.mkdir(jars_dir)

        # Copy all the jar files needed by the launchers
        for e in mx.classpath_entries('FASTR'):
            source_path = e.classpath_repr()
            if mx.is_cache_path(source_path):
                target_file_name = e.name.lower().replace('_', '-') + '.jar'
            else:
                target_file_name = basename(source_path)
            assert isfile(source_path)
            shutil.copy(source_path, join(jars_dir, target_file_name))

        # create the classpath string
        classpath = []
        for _, _, jars in os.walk(jars_dir):
            for jar in jars:
                classpath.append(join("$R_HOME/bin/fastr_jars", jar))
        classpath_string = ":".join(classpath)

        # replace the mx exec scripts with native Java launchers, setting the classpath from above
        bin_exec_dir = join(bin_dir, 'exec')
        r_launcher = join(self.subject.dir, 'src', 'R_legacy')
        template_dict = {'CLASSPATH': classpath_string}
        self._template(r_launcher, join(bin_exec_dir, 'R'), template_dict)
        shutil.rmtree(join(bin_dir, 'execRextras'))
        rscript_launcher = join(self.subject.dir, 'src', 'Rscript_legacy')
        self._template(rscript_launcher, join(bin_dir, 'Rscript'),
                       template_dict)
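The `_template` helper used above is not included in the snippet; presumably it copies a launcher script while substituting the keys from `template_dict`. A minimal sketch of such a helper, assuming a `${CLASSPATH}`-style placeholder syntax (the placeholder format and the helper body are assumptions, not taken from the original):

    import os
    from string import Template

    def _template(src_path, dst_path, mapping):
        # Hypothetical stand-in: read the launcher template, substitute ${KEY}
        # placeholders from `mapping`, and write the result as an executable script.
        with open(src_path) as src:
            content = Template(src.read()).safe_substitute(mapping)
        with open(dst_path, 'w') as dst:
            dst.write(content)
        os.chmod(dst_path, 0o755)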
Example #38
0
def _parseVmArgs(args, addDefaultArgs=True):
    args = mx.expand_project_in_args(args, insitu=False)

    argsPrefix = []
    jacocoArgs = mx_gate.get_jacoco_agent_args()
    if jacocoArgs:
        argsPrefix.extend(jacocoArgs)

    # add default graal.options.file
    options_file = join(mx.primary_suite().dir, 'graal.options')
    if exists(options_file):
        argsPrefix.append('-Dgraal.options.file=' + options_file)

    if isJDK8:
        argsPrefix.append('-Djvmci.class.path.append=' +
                          os.pathsep.join((e.get_path()
                                           for e in _jvmci_classpath)))
        argsPrefix.append('-Xbootclasspath/a:' + os.pathsep.join(
            [dep.classpath_repr() for dep in _bootclasspath_appends]))
    else:
        deployedDists = [entry.dist() for entry in _jvmci_classpath] + \
                        [e for e in _bootclasspath_appends if e.isJARDistribution()]
        deployedModules = [as_java_module(dist, jdk) for dist in deployedDists]

        # Set or update module path to include Graal and its dependencies as modules
        jdkModuleNames = frozenset([m.name for m in jdk.get_modules()])
        graalModulepath = []
        graalUpgrademodulepath = []

        def _addToModulepath(modules):
            for m in modules:
                if m.jarpath:
                    modulepath = graalModulepath if m.name not in jdkModuleNames else graalUpgrademodulepath
                    if m not in modulepath:
                        modulepath.append(m)

        for deployedModule in deployedModules:
            _addToModulepath(deployedModule.modulepath)
            _addToModulepath([deployedModule])

        # Extend or set --module-path argument
        mpUpdated = False
        for mpIndex in range(len(args)):
            assert not args[mpIndex].startswith('--upgrade-module-path')
            if args[mpIndex] == '--module-path':
                assert mpIndex + 1 < len(args), 'VM option ' + args[
                    mpIndex] + ' requires an argument'
                args[mpIndex + 1] = os.pathsep.join(
                    _uniqify(args[mpIndex + 1].split(os.pathsep) +
                             [m.jarpath for m in graalModulepath]))
                mpUpdated = True
                break
            elif args[mpIndex].startswith('--module-path='):
                mp = args[mpIndex][len('--module-path='):]
                args[mpIndex] = '--module-path=' + os.pathsep.join(
                    _uniqify(
                        mp.split(os.pathsep) +
                        [m.jarpath for m in graalModulepath]))
                mpUpdated = True
                break
        if not mpUpdated:
            argsPrefix.append(
                '--module-path=' +
                os.pathsep.join([m.jarpath for m in graalModulepath]))

        if graalUpgrademodulepath:
            argsPrefix.append(
                '--upgrade-module-path=' +
                os.pathsep.join([m.jarpath for m in graalUpgrademodulepath]))

    if '-version' in args:
        ignoredArgs = args[args.index('-version') + 1:]
        if len(ignoredArgs) > 0:
            mx.log(
                "Warning: The following options will be ignored by the VM because they come after the '-version' argument: "
                + ' '.join(ignoredArgs))

    return jdk.processArgs(argsPrefix + args, addDefaultArgs=addDefaultArgs)
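The module-path merging above relies on a `_uniqify` helper that is not shown. A self-contained sketch of the assumed order-preserving de-duplication, and of how an existing `--module-path` value would be extended (the jar paths are made up for illustration):

    import os

    def _uniqify(seq):
        # Assumed behaviour: drop duplicates while preserving the original order.
        seen = set()
        return [x for x in seq if not (x in seen or seen.add(x))]

    existing_mp = os.pathsep.join(['/opt/mods/a.jar', '/opt/mods/b.jar'])
    graal_jars = ['/graal/compiler.jar', '/opt/mods/a.jar']
    merged = os.pathsep.join(_uniqify(existing_mp.split(os.pathsep) + graal_jars))
    # merged is '/opt/mods/a.jar:/opt/mods/b.jar:/graal/compiler.jar' on POSIX systems.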
Example #39
0
def verify_jvmci_ci_versions(args):
    """
    Checks that the jvmci versions used in various ci files agree.

    If the ci.hocon files use a -dev version, it allows the travis ones to use the previous version.
    For example, if ci.hocon uses jvmci-0.24-dev, travis may use either jvmci-0.24-dev or jvmci-0.23
    """
    version_pattern = re.compile(
        r'^(?!\s*#).*jvmci-(?P<version>\d*\.\d*)(?P<dev>-dev)?')

    def _grep_version(files, msg):
        version = None
        dev = None
        last = None
        linenr = 0
        for filename in files:
            for line in open(filename):
                m = version_pattern.search(line)
                if m:
                    new_version = m.group('version')
                    new_dev = bool(m.group('dev'))
                    if (version and version != new_version) or (
                            dev is not None and dev != new_dev):
                        mx.abort(
                            os.linesep.join([
                                "Multiple JVMCI versions found in {0} files:".
                                format(msg),
                                "  {0} in {1}:{2}:    {3}".format(
                                    version + ('-dev' if dev else ''), *last),
                                "  {0} in {1}:{2}:    {3}".format(
                                    new_version + ('-dev' if new_dev else ''),
                                    filename, linenr, line),
                            ]))
                    last = (filename, linenr, line.rstrip())
                    version = new_version
                    dev = new_dev
                linenr += 1
        if not version:
            mx.abort("No JVMCI version found in {0} files!".format(msg))
        return version, dev

    hocon_version, hocon_dev = _grep_version(
        glob.glob(join(mx.primary_suite().vc_dir, '*.hocon')) +
        glob.glob(join(mx.primary_suite().dir, 'ci*.hocon')) +
        glob.glob(join(mx.primary_suite().dir, 'ci*/*.hocon')), 'ci.hocon')
    travis_version, travis_dev = _grep_version(glob.glob('.travis.yml'),
                                               'TravisCI')

    if hocon_version != travis_version or hocon_dev != travis_dev:
        versions_ok = False
        if not travis_dev and hocon_dev:
            next_travis_version = [int(a) for a in travis_version.split('.')]
            next_travis_version[-1] += 1
            next_travis_version_str = '.'.join(
                (str(a) for a in next_travis_version))
            if next_travis_version_str == hocon_version:
                versions_ok = True
        if not versions_ok:
            mx.abort(
                "Travis and ci.hocon JVMCI versions do not match: {0} vs. {1}".
                format(travis_version + ('-dev' if travis_dev else ''),
                       hocon_version + ('-dev' if hocon_dev else '')))
    mx.log('JVMCI versions are ok!')
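To see what `version_pattern` above accepts, here is a small standalone check with made-up input lines; lines that start with a comment marker are rejected by the negative lookahead:

    import re

    version_pattern = re.compile(r'^(?!\s*#).*jvmci-(?P<version>\d*\.\d*)(?P<dev>-dev)?')

    samples = ['jdk8u161-jvmci-0.42-dev', '# jdk8u161-jvmci-0.42', 'image: jvmci-0.41']
    for line in samples:
        m = version_pattern.search(line)
        print(line, '->', (m.group('version'), bool(m.group('dev'))) if m else 'no match')
    # prints, roughly:
    #   jdk8u161-jvmci-0.42-dev -> ('0.42', True)
    #   # jdk8u161-jvmci-0.42 -> no match (commented out)
    #   image: jvmci-0.41 -> ('0.41', False)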
Example #40
0
 def eat(self, line):
     mx.log(line.rstrip())
     self.f.write(line)
Example #41
0
File: mx_gate.py Project: JornVernee/mx
def _run_gate(cleanArgs, args, tasks):
    global _jacoco
    with Task('Versions', tasks, tags=[Tags.always]) as t:
        if t:
            mx.command_function('version')(['--oneline'])
            mx.command_function('sversions')([])

    with Task('JDKReleaseInfo', tasks, tags=[Tags.always]) as t:
        if t:
            jdkDirs = os.pathsep.join([mx.get_env('JAVA_HOME', ''), mx.get_env('EXTRA_JAVA_HOMES', '')])
            for jdkDir in jdkDirs.split(os.pathsep):
                release = join(jdkDir, 'release')
                if exists(release):
                    mx.log('==== ' + jdkDir + ' ====')
                    with open(release) as fp:
                        mx.log(fp.read().strip())

    for suiteRunner in _pre_gate_runners:
        suite, runner = suiteRunner
        if args.all_suites or suite is mx.primary_suite():
            runner(args, tasks)

    with Task('Pylint', tasks, tags=[Tags.style]) as t:
        if t:
            if mx.command_function('pylint')(['--primary']) != 0:
                _warn_or_abort('Pylint not configured correctly. Cannot execute Pylint task.', args.strict_mode)

    gate_clean(cleanArgs, tasks, tags=[Tags.build, Tags.fullbuild, Tags.ecjbuild])

    with Task('Distribution Overlap Check', tasks, tags=[Tags.style]) as t:
        if t:
            if mx.command_function('checkoverlap')([]) != 0:
                t.abort('Found overlapping distributions.')

    with Task('Canonicalization Check', tasks, tags=[Tags.style]) as t:
        if t:
            mx.log(time.strftime('%d %b %Y %H:%M:%S - Ensuring mx/projects files are canonicalized...'))
            if mx.command_function('canonicalizeprojects')([]) != 0:
                t.abort('Rerun "mx canonicalizeprojects" and modify the suite.py files as suggested.')

    with Task('Verify Java Sources in Project', tasks, tags=[Tags.style]) as t:
        if t:
            mx.log(time.strftime('%d %b %Y %H:%M:%S - Ensuring all Java sources are in a Java project directory...'))
            if mx.command_function('verifysourceinproject')([]) != 0:
                t.abort('Move or delete the Java sources that are not in a Java project directory.')

    if mx._is_supported_by_jdt(mx.DEFAULT_JDK_TAG):
        with Task('BuildWithEcj', tasks, tags=[Tags.fullbuild, Tags.ecjbuild], legacyTitles=['BuildJavaWithEcj']) as t:
            if t:
                defaultBuildArgs = ['-p']
                fullbuild = True if Task.tags is None else Tags.fullbuild in Task.tags
                # Using ecj alone is not compatible with --warning-as-error (see GR-3969)
                if not args.no_warning_as_error and fullbuild:
                    defaultBuildArgs += ['--warning-as-error']
                if mx.get_env('JDT'):
                    mx.command_function('build')(defaultBuildArgs + args.extra_build_args)
                    if fullbuild:
                        gate_clean(cleanArgs, tasks, name='CleanAfterEcjBuild', tags=[Tags.fullbuild])
                else:
                    _warn_or_abort('JDT environment variable not set. Cannot execute BuildWithEcj task.', args.strict_mode)

    with Task('BuildWithJavac', tasks, tags=[Tags.build, Tags.fullbuild], legacyTitles=['BuildJavaWithJavac']) as t:
        if t:
            defaultBuildArgs = ['-p']
            if not args.no_warning_as_error:
                defaultBuildArgs += ['--warning-as-error']
            mx.command_function('build')(defaultBuildArgs + ['--force-javac'] + args.extra_build_args)

    with Task('IDEConfigCheck', tasks, tags=[Tags.fullbuild]) as t:
        if t:
            if args.cleanIDE:
                mx.command_function('ideclean')([])
                mx.command_function('ideinit')([])

    with Task('CodeFormatCheck', tasks, tags=[Tags.style]) as t:
        if t:
            eclipse_exe = mx.get_env('ECLIPSE_EXE')
            if eclipse_exe is not None:
                if mx.command_function('eclipseformat')(['-e', eclipse_exe, '--primary']) != 0:
                    t.abort('Formatter modified files - run "mx eclipseformat", check in changes and repush')
            else:
                _warn_or_abort('ECLIPSE_EXE environment variable not set. Cannot execute CodeFormatCheck task.', args.strict_mode)

    with Task('Checkstyle', tasks, tags=[Tags.style]) as t:
        if t and mx.command_function('checkstyle')(['--primary']) != 0:
            t.abort('Checkstyle warnings were found')

    with Task('FindBugs', tasks, tags=[Tags.fullbuild]) as t:
        if t and mx.command_function('findbugs')([]) != 0:
            t.abort('FindBugs warnings were found')

    with Task('VerifyLibraryURLs', tasks, tags=[Tags.fullbuild]) as t:
        if t:
            mx.command_function('verifylibraryurls')([])

    jacoco_exec = 'jacoco.exec'
    if exists(jacoco_exec):
        os.unlink(jacoco_exec)

    if args.jacocout is not None:
        _jacoco = 'append'
    else:
        _jacoco = 'off'

    for suiteRunner in _gate_runners:
        suite, runner = suiteRunner
        if args.all_suites or suite is mx.primary_suite():
            runner(args, tasks)

    if args.jacocout is not None:
        jacoco_args = [args.jacocout]
        if args.jacoco_omit_excluded:
            jacoco_args = ['--omit-excluded'] + jacoco_args
        mx.command_function('jacocoreport')(jacoco_args)
        _jacoco = 'off'
    if args.jacoco_zip is not None:
        mx.log('Creating JaCoCo report archive: {}'.format(args.jacoco_zip))
        with zipfile.ZipFile(args.jacoco_zip, 'w', compression=zipfile.ZIP_DEFLATED) as zf:
            zf.write(jacoco_exec, join(args.jacocout, jacoco_exec))
            for root, _, files in os.walk(args.jacocout):
                for f in files:
                    zf.write(os.path.join(root, f))
        mx.log('Archiving done.')
Example #42
0
File: mx_gate.py Project: JornVernee/mx
 def mx_command_entered(command, *args, **kwargs):
     global _command_level
     if _command_level == 0:
         all_commands.append((command.command, args, kwargs))
         mx.log(mx.colorize('Running: ' + command_in_gate_message(command.command, args, kwargs), color='blue'))
     _command_level = _command_level + 1
Example #43
0
File: mx_gate.py Project: JornVernee/mx
def gate(args):
    """run the tests used to validate a push

    If this command exits with a 0 exit code, then the gate passed."""

    parser = ArgumentParser(prog='mx gate')
    add_omit_clean_args(parser)
    parser.add_argument('--all-suites', action='store_true', help='run gate tasks for all suites, not just the primary suite')
    parser.add_argument('--dry-run', action='store_true', help='just show the tasks that will be run without running them')
    parser.add_argument('-x', action='store_true', help='makes --task-filter or --tags an exclusion instead of inclusion filter')
    jacoco = parser.add_mutually_exclusive_group()
    jacoco.add_argument('--jacocout', help='specify the output directory for jacoco report')
    jacoco.add_argument('--jacoco-zip', help='specify the output zip file for jacoco report')
    parser.add_argument('--jacoco-omit-excluded', action='store_true', help='omit excluded files from jacoco report')
    parser.add_argument('--strict-mode', action='store_true', help='abort if a task cannot be executed due to missing tool configuration')
    parser.add_argument('--no-warning-as-error', action='store_true', help='compile warnings are not treated as errors')
    parser.add_argument('-B', dest='extra_build_args', action='append', metavar='<build_args>', help='append additional arguments to mx build commands used in the gate')
    parser.add_argument('-p', '--partial', help='run only a subset of the tasks in the gate (index/total). Eg. "--partial 2/5" runs the second fifth of the tasks in the gate. Tasks with tag build are repeated for each run.')
    filtering = parser.add_mutually_exclusive_group()
    filtering.add_argument('-t', '--task-filter', help='comma separated list of substrings to select subset of tasks to be run')
    filtering.add_argument('-s', '--start-at', help='substring to select starting task')
    filtering.add_argument('--tags', help='comma separated list of tags to select subset of tasks to be run. Tags can have a range specifier `name[:from:[to]]`.'
                           ' If present only the [from,to) tasks are executed. If `to` is omitted, all tasks starting with `from` are executed.')

    for a, k in _extra_gate_arguments:
        parser.add_argument(*a, **k)
    args = parser.parse_args(args)
    cleanArgs = check_gate_noclean_arg(args)

    if args.dry_run:
        Task.dryRun = True
    if args.start_at:
        Task.startAtFilter = args.start_at
    elif args.task_filter:
        Task.filters = args.task_filter.split(',')
        Task.filtersExclude = args.x
    elif args.tags:
        parse_tags_argument(args.tags, args.x)
        Task.tagsExclude = args.x
        if not Task.tagsExclude:
            # implicitly include 'always'
            Task.tags += [Tags.always]
    elif args.x:
        mx.abort('-x option cannot be used without --task-filter or the --tags option')

    if args.jacoco_zip:
        args.jacocout = 'html'

    if not args.extra_build_args:
        args.extra_build_args = []

    if args.partial:
        partialArgs = args.partial.split('/')
        if len(partialArgs) != 2:
            mx.abort('invalid partial argument specified')

        selected = int(partialArgs[0]) - 1
        total = int(partialArgs[1])
        if selected < 0 or selected >= total:
            mx.abort('out of bounds partial argument specified')

        tasks = _collect_tasks(cleanArgs, args)

        # build and always tags must be run by every partial gate run
        alwaysTags = [Tags.always, Tags.build]
        buildTasks = [task for task in tasks if not task.skipped and any([f in t for t in alwaysTags for f in task.tags])]
        nonBuildTasks = [task for task in tasks if not task.skipped and not any([f in t for t in alwaysTags for f in task.tags])]

        partialTasks = nonBuildTasks[selected::total]
        runTaskNames = [task.title for task in buildTasks + partialTasks]

        # we have already run the filters in the dry run when collecting,
        # so we can safely overwrite other filter settings.
        Task.filters = runTaskNames
        Task.filtersExclude = False

        mx.log('Running gate with partial tasks ' + args.partial + ". " + str(len(partialTasks)) + " out of " + str(len(nonBuildTasks)) + " non-build tasks selected.")
        if len(partialTasks) == 0:
            mx.log('No partial tasks left to run. Finishing gate early.')
            return

    Task.startTime = time.time()
    tasks = []
    total = Task('Gate')
    all_commands = []

    def shell_quoted_args(args):
        args_string = ' '.join([pipes.quote(str(arg)) for arg in args])
        if args_string != '':
            args_string = ' ' + args_string
        return args_string

    def mx_command_entered(command, *args, **kwargs):
        global _command_level
        if _command_level == 0:
            all_commands.append((command.command, args, kwargs))
            mx.log(mx.colorize('Running: ' + command_in_gate_message(command.command, args, kwargs), color='blue'))
        _command_level = _command_level + 1

    def mx_command_left(_, *__, **___):
        global _command_level
        assert _command_level >= 0
        _command_level = _command_level - 1

    def print_commands_on_failure():
        message_color = 'red'
        mx.log(mx.colorize('\nThe sequence of mx commands that were executed until the failure follows:\n', color=message_color))
        for command, command_args, kwargs in all_commands:
            message = command_in_gate_message(command, command_args, kwargs)

            mx.log(mx.colorize(message, color=message_color))

    def command_in_gate_message(command, command_args, kwargs):
        one_list = len(command_args) == 1 and isinstance(command_args[0], (list,))
        kwargs_absent = len(kwargs) == 0
        if one_list and kwargs_absent:  # gate command reproducible on the command line
            quoted_args = (' '.join([pipes.quote(str(arg)) for arg in command_args[0]]))
            message = 'mx ' + command + ' ' + quoted_args
        else:
            args_message = '(Programmatically executed. '
            if not one_list:
                args_message += 'Args: ' + str(command_args)
            if not kwargs_absent:
                args_message += 'Kwargs: ' + str(kwargs)
            args_message += ')'
            message = 'mx' + shell_quoted_args(_mx_args) + ' ' + command + ' ' + args_message
        return message

    try:
        mx._mx_commands.add_command_callback(mx_command_entered, mx_command_left)
        _run_gate(cleanArgs, args, tasks)
    except KeyboardInterrupt:
        total.abort(1)
    except BaseException as e:
        import traceback
        traceback.print_exc()
        print_commands_on_failure()
        total.abort(str(e))
    finally:
        mx._mx_commands.remove_command_callback(mx_command_entered, mx_command_left)

    total.stop()

    mx.log('Gate task times:')
    for t in tasks:
        mx.log('  ' + str(t.duration) + '\t' + t.title + ("" if not (Task.verbose and t.tags) else (' [' + ','.join(t.tags) + ']')))
    mx.log('  =======')
    mx.log('  ' + str(total.duration))

    if args.task_filter:
        Task.filters = None
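The `--partial` option documented above splits the non-build tasks into strides; a minimal illustration of the `nonBuildTasks[selected::total]` slicing, with invented task names:

    # "mx gate --partial 2/3" on a gate with seven non-build tasks:
    non_build_tasks = ['T1', 'T2', 'T3', 'T4', 'T5', 'T6', 'T7']
    selected, total = 2 - 1, 3                 # zero-based slice index, number of slices
    print(non_build_tasks[selected::total])    # ['T2', 'T5'] -- every third task, starting at T2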
Example #44
0
 def default_start_date(self):
     default_start_date = '2020-01-22'
     mx.log('default_start_date: ' + default_start_date)
     return default_start_date
Example #45
0
def symlink_or_copy(target_path, dest_path, debug_gr_8964=False):
    # Follow symbolic links in case they go outside my suite directories.
    real_target_path = os.path.realpath(target_path)
    if debug_gr_8964:
        mx.log('  [mx_substratevm.symlink_or_copy:')
        mx.log('    suite.dir:' + suite.dir)
        mx.log('    target_path: ' + target_path)
        mx.log('    real_target_path: ' + real_target_path)
        mx.log('    dest_path: ' + dest_path)
    if any(
            real_target_path.startswith(s.dir)
            for s in mx.suites(includeBinary=False)):
        # Symbolic link to files in my suites.
        sym_target = os.path.relpath(real_target_path, dirname(dest_path))
        if debug_gr_8964:
            mx.log('      symlink target: ' + sym_target)
        os.symlink(sym_target, dest_path)
    else:
        # Otherwise copy the file so it cannot change out from under me.
        if debug_gr_8964:
            mx.log('      copy2: ')
        copy2(real_target_path, dest_path)
    if debug_gr_8964:
        mx.log('  ]')
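A hypothetical invocation (paths invented) to show the two branches: files inside a checked-out suite get a relative symlink next to the destination, everything else is copied:

    # Inside a suite checkout -> a relative symlink is created next to dest_path.
    symlink_or_copy('/ws/graal/substratevm/src/foo.c', '/tmp/image-build/foo.c')
    # Outside any suite -> the file is copied so it cannot change underneath the build.
    symlink_or_copy('/usr/lib/libz.so', '/tmp/image-build/libz.so', debug_gr_8964=True)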
Example #46
0
def jt(*args):
    mx.log("\n$ " + ' '.join(['jt'] + list(args)) + "\n")
    mx.run(['ruby', join(root, 'tool/jt.rb')] + list(args))
Example #47
0
 def _collector(x):
     actualOutput.append(x)
     mx.log(x)
Example #48
0
def unittest(args, test_report_tags=None):
    """run the JUnit tests

    :param test_report_tags: if not None, key/value pairs that will be used
             to make a test report for the unit tests. See the details of the `tags`
             parameter of the `mx_gate.make_test_report` function.
    :return the generated test report iff `test_report_tags` is not None
    """

    junit_arg_actions = []
    junit_args = []

    class MxJUnitWrapperArg(Action):
        def __init__(self, **kwargs):
            kwargs['required'] = False
            Action.__init__(self, **kwargs)
            junit_arg_actions.append(self)

        def __call__(self, parser, namespace, values, option_string=None):
            junit_args.append('-' + self.dest)
            junit_args.append(values)

    class MxJUnitWrapperBoolArg(Action):
        def __init__(self, **kwargs):
            kwargs['required'] = False
            kwargs['nargs'] = 0
            Action.__init__(self, **kwargs)
            junit_arg_actions.append(self)

        def __call__(self, parser, namespace, values, option_string=None):
            junit_args.append('-' + self.dest)

    parser = ArgumentParser(
        prog='mx unittest',
        description='run the JUnit tests',
        formatter_class=RawDescriptionHelpFormatter,
        epilog=unittestHelpSuffix,
    )

    parser.add_argument('--blacklist',
                        help='run all testcases not specified in <file>',
                        metavar='<file>')
    parser.add_argument('--whitelist',
                        help='run testcases specified in <file> only',
                        metavar='<file>')
    parser.add_argument('--verbose',
                        help='enable verbose JUnit output',
                        dest='JUnitVerbose',
                        action=MxJUnitWrapperBoolArg)
    parser.add_argument('--very-verbose',
                        help='enable very verbose JUnit output',
                        dest='JUnitVeryVerbose',
                        action=MxJUnitWrapperBoolArg)
    parser.add_argument(
        '--max-class-failures',
        help=
        'stop after N test classes that have a failure (default is no limit)',
        type=is_strictly_positive,
        metavar='<N>',
        dest='JUnitMaxClassFailures',
        action=MxJUnitWrapperArg)
    parser.add_argument('--fail-fast',
                        help='alias for --max-class-failures=1',
                        dest='JUnitFailFast',
                        action=MxJUnitWrapperBoolArg)
    parser.add_argument(
        '--enable-timing',
        help='enable JUnit test timing (requires --verbose/--very-verbose)',
        dest='JUnitEnableTiming',
        action=MxJUnitWrapperBoolArg)
    parser.add_argument(
        '--regex',
        help='run only testcases matching a regular expression',
        metavar='<regex>')
    parser.add_argument('--color',
                        help='enable color output',
                        dest='JUnitColor',
                        action=MxJUnitWrapperBoolArg)
    parser.add_argument('--gc-after-test',
                        help='force a GC after each test',
                        dest='JUnitGCAfterTest',
                        action=MxJUnitWrapperBoolArg)
    parser.add_argument(
        '--record-results',
        help='record test class results to passed.txt and failed.txt',
        dest='JUnitRecordResults',
        action=MxJUnitWrapperBoolArg)
    record_help_msg_shared = 'If <file> is "-", the tests will be printed to stdout. ' +\
                  'In contrast to --record-results this prints not only the test class but also the test method.'
    parser.add_argument('--print-passed',
                        metavar="<file>",
                        dest='JUnitRecordPassed',
                        action=MxJUnitWrapperArg,
                        help='record passed test class results in <file>. ' +
                        record_help_msg_shared)
    parser.add_argument('--print-failed',
                        metavar="<file>",
                        dest='JUnitRecordFailed',
                        action=MxJUnitWrapperArg,
                        help='record failed test class results in <file>.  ' +
                        record_help_msg_shared)
    parser.add_argument('--json-results',
                        metavar="<file>",
                        help='record test results in <file> in json format.')
    parser.add_argument('--suite',
                        help='run only the unit tests in <suite>',
                        metavar='<suite>')
    parser.add_argument('--repeat',
                        help='run each test <n> times',
                        dest='JUnitRepeat',
                        action=MxJUnitWrapperArg,
                        type=is_strictly_positive,
                        metavar='<n>')
    parser.add_argument(
        '--open-packages',
        dest='JUnitOpenPackages',
        action=MxJUnitWrapperArg,
        metavar='<module>/<package>[=<target-module>(,<target-module>)*]',
        help=
        "export and open packages regardless of module declarations (see more detail and examples below)"
    )
    eagerStacktrace = parser.add_mutually_exclusive_group()
    eagerStacktrace.add_argument(
        '--eager-stacktrace',
        action='store_const',
        const=True,
        dest='eager_stacktrace',
        help='print test errors as they occur (default)')
    eagerStacktrace.add_argument(
        '--no-eager-stacktrace',
        action='store_const',
        const=False,
        dest='eager_stacktrace',
        help='print test errors after all tests have run')

    for a, k in _extra_unittest_arguments:
        parser.add_argument(*a, **k)

    # Augment usage text to mention test filters and options passed to the VM
    usage = parser.format_usage().strip()
    if usage.startswith('usage: '):
        usage = usage[len('usage: '):]
    parser.usage = usage + ' [test filters...] [VM options...]'

    args, parsed_args = parse_split_args(args, parser, "--")

    # Remove junit_args values from parsed_args
    for a in junit_arg_actions:
        parsed_args.__dict__.pop(a.dest)

    if parsed_args.whitelist:
        try:
            with open(parsed_args.whitelist) as fp:
                parsed_args.whitelist = [
                    re.compile(fnmatch.translate(l.rstrip()))
                    for l in fp.readlines() if not l.startswith('#')
                ]
        except IOError:
            mx.log('warning: could not read whitelist: ' +
                   parsed_args.whitelist)
    if parsed_args.blacklist:
        try:
            with open(parsed_args.blacklist) as fp:
                parsed_args.blacklist = [
                    re.compile(fnmatch.translate(l.rstrip()))
                    for l in fp.readlines() if not l.startswith('#')
                ]
        except IOError:
            mx.log('warning: could not read blacklist: ' +
                   parsed_args.blacklist)

    if parsed_args.eager_stacktrace is None:
        junit_args.append('-JUnitEagerStackTrace')
    parsed_args.__dict__.pop('eager_stacktrace')

    make_test_report = test_report_tags is not None

    test_results = parsed_args.__dict__.pop('json_results')
    delete_test_results = False
    if not test_results and make_test_report:
        test_results = tempfile.NamedTemporaryFile(delete=False,
                                                   suffix='.json.gz').name
        delete_test_results = True
    if test_results:
        junit_args.append('-JUnitJsonResults')
        junit_args.append(test_results)
    try:
        _unittest(args, ['@Test', '@Parameters'], junit_args,
                  **parsed_args.__dict__)
        if make_test_report:
            import mx_gate
            return mx_gate.make_test_report(test_results,
                                            tags=test_report_tags)
    finally:
        if delete_test_results:
            os.unlink(test_results)
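A hedged usage sketch (the test filter and tag values are illustrative, not taken from the original): passing `test_report_tags` makes the function build and return a test report from the JSON results.

    # Hypothetical call: run tests matching a filter and collect a report.
    report = unittest(['--verbose', 'com.oracle.mxtool.junit'],
                      test_report_tags={'task': 'UnitTests'})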
Example #49
0
def image(args):
    """build a boot image

    Run the BootImageGenerator to build a Maxine boot image. The classes
    and packages specified on the command line will be included in the
    boot image in addition to those found by the Package.java mechanism.
    Package names are differentiated from class names by being prefixed
    with '^'.

    The platform configuration for the generated image is auto-detected
    by native methods. However, the following system properties can be
    used to override the defaults:

    Name            | Description                   | Example values
    ================+===============================+================
    max.platform    | name of a preset platform     | solaris-amd64 linux-amd64 darwin-amd64
    max.cpu         | processor model               | AMD64 IA32 SPARCV9 ARMV7
    max.isa         | instruction set architecture  | AMD64 ARM PPC SPARC
    max.os          | operating system              | Darwin Linux Solaris
    max.endianness  | endianness                    | BIG LITTLE
    max.bits        | machine word size             | 64 32
    max.page        | page size                     | 4096 8192
    max.nsig        | number of signals             | 32
    max.idiv        | has hw integer divider        | 1/0

    These system properties can be specified as options to the image
    command (e.g. '-os Darwin -bits 32').

    An option starting with '@' denotes one of the preconfigured set of
    options described by running "mx options".

    An option starting with '--' is interpreted as a VM option of the same name
    after the leading '-' is removed. For example, to use the '-verbose:class'
    VM option to trace class loading while image building, specify '--verbose:class'.
    Note that not all VM options have an effect during image building.

    Use "mx image -help" to see what other options this command accepts."""

    systemProps = ['-Xmx1G']
    imageArgs = []
    i = 0
    while i < len(args):
        arg = args[i]
        if arg[0] == '@':
            name = arg.lstrip('@')
            configs = _configs()
            if not name in configs:
                mx.log()
                mx.abort('Invalid image configuration: ' + name)
            if "graal" in name:
                systemProps += ['-ea', '-esa']
            values = configs[name].split('@')
            del args[i]
            args[i:i] = values
            continue
        elif arg in [
                '-platform', '-cpu', '-isa', '-os', '-endianness', '-bits',
                '-page', '-nsig', '-idiv'
        ]:
            name = arg.lstrip('-')
            i += 1
            if i == len(args):
                mx.abort('Missing value for ' + arg)
            value = args[i]
            systemProps += ['-Dmax.' + name + '=' + value]
        elif arg.startswith('--XX:LogFile='):
            os.environ['MAXINE_LOG_FILE'] = arg.split('=', 1)[1]
        elif arg.startswith('--XX:+PrintCFGToFile'):
            os.environ['PRINT_CFG'] = '1'
        elif arg.startswith('--XX:+EnableBootImageDebugMethodID'):
            os.environ['ENABLE_DEBUG_METHODS_ID'] = '1'
        elif arg.startswith('--XX:+PrintHIR'):
            os.environ['PRINT_HIR'] = '1'
        elif arg.startswith('--XX:PrintFilter='):
            os.environ['PRINT_FILTER'] = arg.split('=', 1)[1]
        elif arg == '-vma':
            systemProps += ['-Dmax.permsize=2']
        else:
            imageArgs += [arg]
        i += 1

    mx.run_java(
        ['-Xbootclasspath/a:' + mx.distribution('GRAAL').path] + systemProps +
        [
            '-cp',
            sanitized_classpath(), 'com.sun.max.vm.hosted.BootImageGenerator',
            '-trace=1', '-run=java'
        ] + imageArgs)
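A short trace of the argument loop above with example values, showing how platform switches become `max.*` system properties while unrecognised options fall through to the image generator:

    args = ['-os', 'Darwin', '-bits', '32', '^com.sun.max.vm.jdk']
    # After the loop: systemProps == ['-Xmx1G', '-Dmax.os=Darwin', '-Dmax.bits=32']
    #                 imageArgs   == ['^com.sun.max.vm.jdk']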
Example #50
0
    def run_java(self, args, out=None, err=None, cwd=None, nonZeroIsFatal=False):
        if '-version' in args:
            return super(NativeImageVM, self).run_java(args, out=out, err=err, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal)
        else:
            config = NativeImageVM.BenchmarkConfig()
            original_java_run_args = config.parse(args)
            executable, classpath_arguments, system_properties, image_run_args = NativeImageVM.extract_benchmark_arguments(original_java_run_args)

            # Agent configuration and/or HotSpot profiling
            needs_config = (config.config_dir is None) and config.needs_config
            if needs_config or self.hotspot_pgo:
                config_vm_args = []
                profiling_vm_args = []
                config_run_args = []
                profiling_run_args = []

                if needs_config:
                    config.config_dir = mx.mkdtemp(suffix='config', prefix='native-image')
                    config_vm_args += ['-agentlib:native-image-agent=config-output-dir=' + str(config.config_dir)]
                    config_run_args += config.extra_agent_run_args

                if self.hotspot_pgo:
                    profiling_vm_args += ['-Dgraal.ProfilesCollectExperimental=true']
                    config_run_args = []
                    profiling_run_args = config.extra_profile_run_args

                agent_run_args = config_vm_args + profiling_vm_args + original_java_run_args + config.extra_run_args + config_run_args + profiling_run_args
                mx.log('Running with HotSpot to get the configuration files and profiles. This could take a while:')
                super(NativeImageVM, self).run_java(agent_run_args,
                                                    out=None, err=None, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal)

            base_image_build_args = [os.path.join(mx_vm.graalvm_home(fatalIfMissing=True), 'bin', 'native-image')]
            base_image_build_args += ['--no-fallback']
            base_image_build_args += ['-J-ea', '-J-esa']
            base_image_build_args += system_properties
            base_image_build_args += classpath_arguments
            executable_name = (executable[1] if executable[0] == '-jar' else executable[0]).lower()
            base_image_build_args += executable
            base_image_build_args += ['-H:Name=' + executable_name]
            if needs_config:
                base_image_build_args += ['-H:ConfigurationFileDirectories=' + config.config_dir]
            base_image_build_args += config.extra_image_build_arguments

            # PGO instrumentation
            i = 0
            while i < self.pgo_instrumented_iterations:
                instrument_args = ['--pgo-instrument'] + ([] if i == 0 and not self.hotspot_pgo else ['--pgo'])
                instrument_image_build_args = base_image_build_args + instrument_args
                mx.log('Building the instrumentation image with: ')
                mx.log(' ' + ' '.join([pipes.quote(str(arg)) for arg in instrument_image_build_args]))
                mx.run(instrument_image_build_args, out=None, err=None, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal)

                image_run_cmd = [os.path.abspath(executable_name)] + image_run_args + config.extra_run_args + config.extra_profile_run_args
                mx.log('Running the instrumented image with: ')
                mx.log(' ' + ' '.join([pipes.quote(str(arg)) for arg in image_run_cmd]))
                mx.run(image_run_cmd, out=out, err=err, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal)

                i += 1

            # Build the final image
            pgo_args = ['--pgo'] if self.pgo_instrumented_iterations > 0 or self.hotspot_pgo else []
            final_image_args = base_image_build_args + pgo_args
            mx.log('Building the final image with: ')
            mx.log(' ' + ' '.join([pipes.quote(str(arg)) for arg in final_image_args]))
            mx.run(final_image_args, out=None, err=None, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal)

            # Execute the benchmark
            image_run_cmd = [os.path.abspath(executable_name)] + image_run_args + config.extra_run_args
            mx.log('Running the produced native executable with: ')
            mx.log(' ' + ' '.join([pipes.quote(str(arg)) for arg in image_run_cmd]))
            mx.run(image_run_cmd, out=out, err=err, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal)
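For orientation, with `pgo_instrumented_iterations == 2` and `hotspot_pgo` disabled, the instrumentation loop above issues the following build argument lists (a sketch derived from the code, not additional behaviour):

    instrument_builds = [
        base_image_build_args + ['--pgo-instrument'],           # i == 0
        base_image_build_args + ['--pgo-instrument', '--pgo'],  # i == 1, consumes profiles from run 0
    ]
    final_build = base_image_build_args + ['--pgo']
    # The instrumented executable is run once after each instrumented build to collect profiles.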
Example #51
0
def gate(args):
    """run the tests used to validate a push to the stable Maxine repository

    If this command exits with a 0 exit code, then the source code is in
    a state that would be accepted for integration into the main repository."""

    check = True
    testArgs = []

    i = 0
    while i < len(args):
        arg = args[i]
        if arg == '-nocheck':
            check = False
        else:
            testArgs += [arg]
        i += 1

    if check:
        mx._opts.specific_suite = "maxine"
        if mx.checkstyle([]):
            mx.abort('Checkstyle warnings were found')

        if exists(join(_maxine_home, '.hg')):
            # Copyright check depends on the sources being in a Mercurial repo
            mx.log('Running copycheck')
            hgNode = os.getenv('hg_node')
            if hgNode is None:
                copycheck(
                    ['-modified', '-reporterrors=true', '-continueonerror'])
            else:
                revTip = int(
                    subprocess.check_output(
                        ['hg', 'tip', '--template', "'{rev}'"]).strip("'"))
                revLast = int(
                    subprocess.check_output(
                        ['hg', 'log', '-r', hgNode, '--template',
                         "'{rev}'"]).strip("'"))
                changesetCount = revTip - revLast + 1
                copycheck([
                    '-last=' + str(changesetCount), '-reporterrors=true',
                    '-continueonerror'
                ])

    mx.log('Ensuring JavaTester harness is up to date')
    try:
        jttgen([])
    except SystemExit:
        mx.log(
            'Updated JavaTesterRunScheme.java or JavaTesterTests.java in com.sun.max.vm.jtrun.all.'
        )
        mx.log(
            'To push your changes to the repository, these files need to be generated locally and checked in.'
        )
        mx.log('The files can be generated by running: mx jttgen')
        mx.abort(1)

    mx.log('Ensuring mx/projects files are canonicalized')
    try:
        mx.canonicalizeprojects([])
    except SystemExit:
        mx.log(
            'Rerun "mx canonicalizeprojects" and check-in the modified mx/projects files.'
        )
        mx.abort(1)

    mx.log('Running MaxineTester...')

    test(['-image-configs=java', '-fail-fast'] + testArgs)
    test([
        '-image-configs=ss', '-tests=output:Hello+Catch+GC+WeakRef+Final',
        '-fail-fast'
    ] + testArgs)
Example #52
0
def _intellij_suite(args, s, declared_modules, referenced_modules, sdks, refreshOnly=False, mx_python_modules=False,
                    generate_external_projects=True, java_modules=True, module_files_only=False, generate_native_projects=False):
    libraries = set()
    jdk_libraries = set()

    project_dir = s.dir
    ideaProjectDirectory = join(project_dir, '.idea')

    modulesXml = mx.XMLDoc()
    if not module_files_only and not s.isBinarySuite():
        mx.ensure_dir_exists(ideaProjectDirectory)
        nameFile = join(ideaProjectDirectory, '.name')
        mx.update_file(nameFile, s.name)
        modulesXml.open('project', attributes={'version': '4'})
        modulesXml.open('component', attributes={'name': 'ProjectModuleManager'})
        modulesXml.open('modules')


    def _intellij_exclude_if_exists(xml, p, name, output=False):
        root = p.get_output_root() if output else p.dir
        path = join(root, name)
        if exists(path):
            excludeRoot = p.get_output_root() if output else '$MODULE_DIR$'
            excludePath = join(excludeRoot, name)
            xml.element('excludeFolder', attributes={'url':'file://' + excludePath})

    annotationProcessorProfiles = {}

    def _complianceToIntellijLanguageLevel(compliance):
        # they changed the name format starting with JDK_10
        if compliance.value >= 10:
            # Latest IDEA 2018.2 only understands JDK_11, so clamp at that value
            return 'JDK_' + str(min(compliance.value, 11))
        return 'JDK_1_' + str(compliance.value)
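    # For example (illustrative values): a compliance of 8 maps to 'JDK_1_8',
    # 10 maps to 'JDK_10', and 13 is clamped to 'JDK_11'.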

    def _intellij_external_project(externalProjects, sdks, host):
        if externalProjects:
            for project_name, project_definition in externalProjects.items():
                if not project_definition.get('path', None):
                    mx.abort("external project {} is missing path attribute".format(project_name))
                if not project_definition.get('type', None):
                    mx.abort("external project {} is missing type attribute".format(project_name))

                supported = ['path', 'type', 'source', 'test', 'excluded', 'load_path']
                unknown = set(project_definition.keys()) - frozenset(supported)
                if unknown:
                    mx.abort("There are unsupported {} keys in {} external project".format(unknown, project_name))

                path = os.path.realpath(join(host.dir, project_definition["path"]))
                module_type = project_definition["type"]

                moduleXml = mx.XMLDoc()
                moduleXml.open('module',
                               attributes={'type': {'ruby': 'RUBY_MODULE',
                                                    'python': 'PYTHON_MODULE',
                                                    'web': 'WEB_MODULE'}.get(module_type, 'UKNOWN_MODULE'),
                                           'version': '4'})
                moduleXml.open('component',
                               attributes={'name': 'NewModuleRootManager', 'inherit-compiler-output': 'true'})
                moduleXml.element('exclude-output')

                moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$'})
                for name in project_definition.get('source', []):
                    moduleXml.element('sourceFolder',
                                      attributes={'url':'file://$MODULE_DIR$/' + name, 'isTestSource': str(False)})
                for name in project_definition.get('test', []):
                    moduleXml.element('sourceFolder',
                                      attributes={'url':'file://$MODULE_DIR$/' + name, 'isTestSource': str(True)})
                for name in project_definition.get('excluded', []):
                    _intellij_exclude_if_exists(moduleXml, type('', (object,), {"dir": path})(), name)
                moduleXml.close('content')

                if module_type == "ruby":
                    moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_ruby_sdk_type, 'jdkName': intellij_get_ruby_sdk_name(sdks)})
                elif module_type == "python":
                    moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_python_sdk_type, 'jdkName': intellij_get_python_sdk_name(sdks)})
                elif module_type == "web":
                    # nothing to do
                    pass
                else:
                    mx.abort("External project type {} not supported".format(module_type))

                moduleXml.element('orderEntry', attributes={'type': 'sourceFolder', 'forTests': 'false'})
                moduleXml.close('component')

                load_paths = project_definition.get('load_path', [])
                if load_paths:
                    if not module_type == "ruby":
                        mx.abort("load_path is supported only for ruby type external project")
                    moduleXml.open('component', attributes={'name': 'RModuleSettingsStorage'})
                    load_paths_attributes = {}
                    load_paths_attributes['number'] = str(len(load_paths))
                    for i, name in enumerate(load_paths):
                        load_paths_attributes["string" + str(i)] = "$MODULE_DIR$/" + name
                    moduleXml.element('LOAD_PATH', load_paths_attributes)
                    moduleXml.close('component')

                moduleXml.close('module')
                moduleFile = join(path, project_name + '.iml')
                mx.update_file(moduleFile, moduleXml.xml(indent='  ', newl='\n'))

                if not module_files_only:
                    declared_modules.add(project_name)
                    moduleFilePath = "$PROJECT_DIR$/" + os.path.relpath(moduleFile, s.dir)
                    modulesXml.element('module', attributes={'fileurl': 'file://' + moduleFilePath, 'filepath': moduleFilePath})

    if generate_external_projects:
        for p in s.projects_recursive() + mx._mx_suite.projects_recursive():
            _intellij_external_project(getattr(p, 'externalProjects', None), sdks, p)

    max_checkstyle_version = None
    compilerXml = None

    if java_modules:
        if not module_files_only:
            compilerXml = mx.XMLDoc()
            compilerXml.open('project', attributes={'version': '4'})

        # The IntelliJ parser seems to mishandle empty ADDITIONAL_OPTIONS_OVERRIDE elements
        # so only emit the section if there will be something in it.
        additionalOptionsOverrides = False
        assert not s.isBinarySuite()
        # create the modules (1 IntelliJ module = 1 mx project/distribution)
        for p in s.projects_recursive() + mx._mx_suite.projects_recursive():
            if not p.isJavaProject():
                continue

            jdk = mx.get_jdk(p.javaCompliance)
            assert jdk

            # Value of the $MODULE_DIR$ IntelliJ variable and parent directory of the .iml file.
            module_dir = mx.ensure_dir_exists(p.dir)

            processors = p.annotation_processors()
            if processors:
                annotationProcessorProfiles.setdefault((p.source_gen_dir_name(),) + tuple(processors), []).append(p)

            intellijLanguageLevel = _complianceToIntellijLanguageLevel(p.javaCompliance)

            moduleXml = mx.XMLDoc()
            moduleXml.open('module', attributes={'type': 'JAVA_MODULE', 'version': '4'})

            moduleXml.open('component', attributes={'name': 'NewModuleRootManager', 'LANGUAGE_LEVEL': intellijLanguageLevel, 'inherit-compiler-output': 'false'})
            moduleXml.element('output', attributes={'url': 'file://$MODULE_DIR$/' + os.path.relpath(p.output_dir(), module_dir)})

            moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$'})
            for src in p.srcDirs:
                srcDir = mx.ensure_dir_exists(join(p.dir, src))
                moduleXml.element('sourceFolder', attributes={'url':'file://$MODULE_DIR$/' + os.path.relpath(srcDir, module_dir), 'isTestSource': str(p.is_test_project())})
            for name in ['.externalToolBuilders', '.settings', 'nbproject']:
                _intellij_exclude_if_exists(moduleXml, p, name)
            moduleXml.close('content')

            if processors:
                moduleXml.open('content', attributes={'url': 'file://' + p.get_output_root()})
                genDir = p.source_gen_dir()
                mx.ensure_dir_exists(genDir)
                moduleXml.element('sourceFolder', attributes={'url':'file://' + p.source_gen_dir(), 'isTestSource': str(p.is_test_project()), 'generated': 'true'})
                for name in [basename(p.output_dir())]:
                    _intellij_exclude_if_exists(moduleXml, p, name, output=True)
                moduleXml.close('content')

            moduleXml.element('orderEntry', attributes={'type': 'sourceFolder', 'forTests': 'false'})

            proj = p

            dependencies_project_packages = set()

            def should_process_dep(dep, edge):
                if dep.isTARDistribution() or dep.isNativeProject() or dep.isArchivableProject() or dep.isResourceLibrary():
                    mx.logv("Ignoring dependency from {} to {}".format(proj.name, dep.name))
                    return False
                return True

            def process_dep(dep, edge):
                if dep is proj:
                    return
                if dep.isLibrary() or dep.isJARDistribution() or dep.isMavenProject():
                    libraries.add(dep)
                    moduleXml.element('orderEntry', attributes={'type': 'library', 'name': dep.name, 'level': 'project'})
                elif dep.isJavaProject():
                    dependencies_project_packages.update(dep.defined_java_packages())
                    referenced_modules.add(dep.name)
                    moduleXml.element('orderEntry', attributes={'type': 'module', 'module-name': dep.name})
                elif dep.isJdkLibrary():
                    jdk_libraries.add(dep)
                    if jdk.javaCompliance < dep.jdkStandardizedSince:
                        moduleXml.element('orderEntry', attributes={'type': 'library', 'name': dep.name, 'level': 'project'})
                    else:
                        mx.logv("{} skipping {} for {}".format(p, dep, jdk)) #pylint: disable=undefined-loop-variable
                elif dep.isJreLibrary():
                    pass
                elif dep.isClasspathDependency():
                    moduleXml.element('orderEntry', attributes={'type': 'library', 'name': dep.name, 'level': 'project'})
                else:
                    mx.abort("Dependency not supported: {0} ({1})".format(dep, dep.__class__.__name__))

            p.walk_deps(preVisit=should_process_dep, visit=process_dep, ignoredEdges=[mx.DEP_EXCLUDED])

            moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_java_sdk_type, 'jdkName': intellij_get_java_sdk_name(sdks, jdk)})

            moduleXml.close('component')

            if compilerXml and jdk.javaCompliance >= '9':
                moduleDeps = p.get_concealed_imported_packages(jdk=jdk)
                if moduleDeps:
                    exports = sorted([(m, pkgs) for m, pkgs in moduleDeps.items() if dependencies_project_packages.isdisjoint(pkgs)])
                    if exports:
                        args = []
                        exported_modules = set()
                        for m, pkgs in exports:
                            args += ['--add-exports={}/{}=ALL-UNNAMED'.format(m, pkg) for pkg in pkgs]
                            exported_modules.add(m)
                        roots = set(jdk.get_root_modules())
                        observable_modules = jdk.get_modules()
                        default_module_graph = mx_javamodules.get_transitive_closure(roots, observable_modules)
                        module_graph = mx_javamodules.get_transitive_closure(roots | exported_modules, observable_modules)
                        extra_modules = module_graph - default_module_graph
                        if extra_modules:
                            args.append('--add-modules=' + ','.join((m.name for m in extra_modules)))
                        if not additionalOptionsOverrides:
                            additionalOptionsOverrides = True
                            compilerXml.open('component', {'name': 'JavacSettings'})
                            compilerXml.open('option', {'name': 'ADDITIONAL_OPTIONS_OVERRIDE'})
                        compilerXml.element('module', {'name': p.name, 'options': ' '.join(args)})

            # Checkstyle
            csConfig, checkstyleVersion, checkstyleProj = p.get_checkstyle_config()
            if csConfig:
                max_checkstyle_version = max(max_checkstyle_version, mx.VersionSpec(checkstyleVersion)) if max_checkstyle_version else mx.VersionSpec(checkstyleVersion)

                moduleXml.open('component', attributes={'name': 'CheckStyle-IDEA-Module'})
                moduleXml.open('option', attributes={'name': 'configuration'})
                moduleXml.open('map')
                moduleXml.element('entry', attributes={'key': "checkstyle-version", 'value': checkstyleVersion})
                moduleXml.element('entry', attributes={'key': "active-configuration", 'value': "PROJECT_RELATIVE:" + join(checkstyleProj.dir, ".checkstyle_checks.xml") + ":" + checkstyleProj.name})
                moduleXml.close('map')
                moduleXml.close('option')
                moduleXml.close('component')

            moduleXml.close('module')
            moduleFile = join(module_dir, p.name + '.iml')
            mx.update_file(moduleFile, moduleXml.xml(indent='  ', newl='\n').rstrip())

            if not module_files_only:
                declared_modules.add(p.name)
                moduleFilePath = "$PROJECT_DIR$/" + os.path.relpath(moduleFile, project_dir)
                modulesXml.element('module', attributes={'fileurl': 'file://' + moduleFilePath, 'filepath': moduleFilePath})
        if additionalOptionsOverrides:
            compilerXml.close('option')
            compilerXml.close('component')

    if mx_python_modules:

        def _python_module(suite):
            """
            Gets a tuple describing the IntelliJ module for the python sources of `suite`. The tuple
            consists of the module name, module directory and the name of the .iml in the module directory.
            """
            name = basename(suite.mxDir)
            module_dir = suite.mxDir
            return name, mx.ensure_dir_exists(module_dir), name + '.iml'
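        # For a hypothetical suite whose mxDir is '<repo>/mx.graal', this yields
        # ('mx.graal', '<repo>/mx.graal', 'mx.graal.iml').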

        def _add_declared_module(suite):
            if not module_files_only:
                name, module_dir, iml_file = _python_module(suite)
                declared_modules.add(name)
                moduleFilePath = "$PROJECT_DIR$/" + os.path.relpath(join(module_dir, iml_file), project_dir)
                modulesXml.element('module', attributes={'fileurl': 'file://' + moduleFilePath, 'filepath': moduleFilePath})

        # mx.<suite> python module:
        _, module_dir, iml_file = _python_module(s)
        moduleXml = mx.XMLDoc()
        moduleXml.open('module', attributes={'type': 'PYTHON_MODULE', 'version': '4'})
        moduleXml.open('component', attributes={'name': 'NewModuleRootManager', 'inherit-compiler-output': 'true'})
        moduleXml.element('exclude-output')

        if s.name == 'mx':
            # MX itself is special. Python sources are also in the parent folder.
            moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$/..'})
            moduleXml.element('sourceFolder', attributes={'url': 'file://$MODULE_DIR$/..', 'isTestSource': 'false'})
        else:
            moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$'})
            moduleXml.element('sourceFolder', attributes={'url': 'file://$MODULE_DIR$/' + os.path.relpath(s.mxDir, module_dir), 'isTestSource': 'false'})
        for d in os.listdir(s.mxDir):
            directory = join(s.mxDir, d)
            if isdir(directory) and mx.dir_contains_files_recursively(directory, r".*\.java"):
                moduleXml.element('excludeFolder', attributes={'url': 'file://$MODULE_DIR$/' + os.path.relpath(directory, module_dir)})
        moduleXml.close('content')

        moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_python_sdk_type, 'jdkName': intellij_get_python_sdk_name(sdks)})
        moduleXml.element('orderEntry', attributes={'type': 'sourceFolder', 'forTests': 'false'})
        processed_suites = {s.name}

        def _mx_projects_suite(visited_suite, suite_import):
            if suite_import.name in processed_suites:
                return
            processed_suites.add(suite_import.name)
            dep_suite = mx.suite(suite_import.name)
            dep_module_name, _, _ = _python_module(dep_suite)
            moduleXml.element('orderEntry', attributes={'type': 'module', 'module-name': dep_module_name})
            _add_declared_module(dep_suite)
            dep_suite.visit_imports(_mx_projects_suite)
        s.visit_imports(_mx_projects_suite)
        if s.name != 'mx':
            moduleXml.element('orderEntry', attributes={'type': 'module', 'module-name': 'mx.mx'})
        moduleXml.close('component')
        moduleXml.close('module')
        moduleFile = join(module_dir, iml_file)
        mx.update_file(moduleFile, moduleXml.xml(indent='  ', newl='\n'))
        _add_declared_module(s)
        _add_declared_module(mx._mx_suite)

    if generate_native_projects:
        _intellij_native_projects(s, module_files_only, declared_modules, modulesXml)

    if generate_external_projects:
        _intellij_external_project(s.suiteDict.get('externalProjects', None), sdks, s)

    if not module_files_only:
        modulesXml.close('modules')
        modulesXml.close('component')
        modulesXml.close('project')
        moduleXmlFile = join(ideaProjectDirectory, 'modules.xml')
        mx.update_file(moduleXmlFile, modulesXml.xml(indent='  ', newl='\n'))

    if java_modules and not module_files_only:
        unique_library_file_names = set()
        librariesDirectory = mx.ensure_dir_exists(join(ideaProjectDirectory, 'libraries'))

        def make_library(name, path, source_path, suite_dir):
            libraryXml = mx.XMLDoc()

            libraryXml.open('component', attributes={'name': 'libraryTable'})
            libraryXml.open('library', attributes={'name': name})
            libraryXml.open('CLASSES')
            pathX = mx.relpath_or_absolute(path, suite_dir, prefix='$PROJECT_DIR$')
            libraryXml.element('root', attributes={'url': 'jar://' + pathX + '!/'})
            libraryXml.close('CLASSES')
            libraryXml.element('JAVADOC')
            if source_path:
                libraryXml.open('SOURCES')
                if os.path.isdir(source_path):
                    source_pathX = mx.relpath_or_absolute(source_path, suite_dir, prefix='$PROJECT_DIR$')
                    libraryXml.element('root', attributes={'url': 'file://' + source_pathX})
                else:
                    source_pathX = mx.relpath_or_absolute(source_path, suite_dir, prefix='$PROJECT_DIR$')
                    libraryXml.element('root', attributes={'url': 'jar://' + source_pathX + '!/'})
                libraryXml.close('SOURCES')
            else:
                libraryXml.element('SOURCES')
            libraryXml.close('library')
            libraryXml.close('component')

            libraryFile = join(librariesDirectory, _intellij_library_file_name(name, unique_library_file_names))
            return mx.update_file(libraryFile, libraryXml.xml(indent='  ', newl='\n'))

        # Set up the libraries that were used above
        for library in libraries:
            sourcePath = None
            if library.isLibrary():
                path = library.get_path(True)
                if library.sourcePath:
                    sourcePath = library.get_source_path(True)
            elif library.isMavenProject():
                path = library.get_path(True)
                sourcePath = library.get_source_path(True)
            elif library.isJARDistribution():
                path = library.path
                if library.sourcesPath:
                    sourcePath = library.sourcesPath
            elif library.isClasspathDependency():
                path = library.classpath_repr()
            else:
                mx.abort('Dependency not supported: {} ({})'.format(library.name, library.__class__.__name__))
            make_library(library.name, path, sourcePath, s.dir)

        jdk = mx.get_jdk()
        updated = False
        for library in jdk_libraries:
            if library.classpath_repr(jdk) is not None:
                if make_library(library.name, library.classpath_repr(jdk), library.get_source_path(jdk), s.dir):
                    updated = True
        if jdk_libraries and updated:
            mx.log("Setting up JDK libraries using {0}".format(jdk))

        # Set up annotation processor profiles and link them to modules in compiler.xml

        compilerXml.open('component', attributes={'name': 'CompilerConfiguration'})

        compilerXml.element('option', attributes={'name': "DEFAULT_COMPILER", 'value': 'Javac'})
        # using the --release option with javac interferes with using --add-modules which is required for some projects
        compilerXml.element('option', attributes={'name': "USE_RELEASE_OPTION", 'value': 'false'})
        compilerXml.element('resourceExtensions')
        compilerXml.open('wildcardResourcePatterns')
        compilerXml.element('entry', attributes={'name': '!?*.java'})
        compilerXml.close('wildcardResourcePatterns')
        if annotationProcessorProfiles:
            compilerXml.open('annotationProcessing')
            for t, modules in sorted(annotationProcessorProfiles.items()):
                source_gen_dir = t[0]
                processors = t[1:]
                compilerXml.open('profile', attributes={'default': 'false', 'name': '-'.join([ap.name for ap in processors]) + "-" + source_gen_dir, 'enabled': 'true'})
                compilerXml.element('sourceOutputDir', attributes={'name': join(os.pardir, source_gen_dir)})
                compilerXml.element('sourceTestOutputDir', attributes={'name': join(os.pardir, source_gen_dir)})
                compilerXml.open('processorPath', attributes={'useClasspath': 'false'})

                # IntelliJ supports both directories and jars on the annotation processor path whereas
                # Eclipse only supports jars.
                for apDep in processors:
                    def processApDep(dep, edge):
                        if dep.isLibrary() or dep.isJARDistribution():
                            compilerXml.element('entry', attributes={'name': mx.relpath_or_absolute(dep.path, s.dir, prefix='$PROJECT_DIR$')})
                        elif dep.isProject():
                            compilerXml.element('entry', attributes={'name': mx.relpath_or_absolute(dep.output_dir(), s.dir, prefix='$PROJECT_DIR$')})
                    apDep.walk_deps(visit=processApDep)
                compilerXml.close('processorPath')
                for module in modules:
                    compilerXml.element('module', attributes={'name': module.name})
                compilerXml.close('profile')
            compilerXml.close('annotationProcessing')

        compilerXml.close('component')

    if compilerXml:
        compilerXml.close('project')
        compilerFile = join(ideaProjectDirectory, 'compiler.xml')
        mx.update_file(compilerFile, compilerXml.xml(indent='  ', newl='\n'))

    if not module_files_only:
        # Write misc.xml for global JDK config
        miscXml = mx.XMLDoc()
        miscXml.open('project', attributes={'version' : '4'})

        if java_modules:
            mainJdk = mx.get_jdk()
            miscXml.open('component', attributes={'name' : 'ProjectRootManager', 'version': '2', 'languageLevel': _complianceToIntellijLanguageLevel(mainJdk.javaCompliance), 'project-jdk-name': intellij_get_java_sdk_name(sdks, mainJdk), 'project-jdk-type': intellij_java_sdk_type})
            miscXml.element('output', attributes={'url' : 'file://$PROJECT_DIR$/' + os.path.relpath(s.get_output_root(), s.dir)})
            miscXml.close('component')
        else:
            miscXml.element('component', attributes={'name' : 'ProjectRootManager', 'version': '2', 'project-jdk-name': intellij_get_python_sdk_name(sdks), 'project-jdk-type': intellij_python_sdk_type})

        miscXml.close('project')
        miscFile = join(ideaProjectDirectory, 'misc.xml')
        mx.update_file(miscFile, miscXml.xml(indent='  ', newl='\n'))

        # Generate a default configuration for debugging Graal
        runConfig = mx.XMLDoc()
        runConfig.open('component', attributes={'name' : 'ProjectRunConfigurationManager'})
        runConfig.open('configuration', attributes={'default' :'false', 'name' : 'GraalDebug', 'type' : 'Remote', 'factoryName': 'Remote'})
        runConfig.element('option', attributes={'name' : 'USE_SOCKET_TRANSPORT', 'value' : 'true'})
        runConfig.element('option', attributes={'name' : 'SERVER_MODE', 'value' : 'false'})
        runConfig.element('option', attributes={'name' : 'SHMEM_ADDRESS', 'value' : 'javadebug'})
        runConfig.element('option', attributes={'name' : 'HOST', 'value' : 'localhost'})
        runConfig.element('option', attributes={'name' : 'PORT', 'value' : '8000'})
        runConfig.open('RunnerSettings', attributes={'RunnerId' : 'Debug'})
        runConfig.element('option', attributes={'name' : 'DEBUG_PORT', 'value' : '8000'})
        runConfig.element('option', attributes={'name' : 'LOCAL', 'value' : 'false'})
        runConfig.close('RunnerSettings')
        runConfig.element('method')
        runConfig.close('configuration')
        runConfig.close('component')
        runConfigFile = join(ideaProjectDirectory, 'runConfigurations', 'GraalDebug.xml')
        mx.ensure_dir_exists(join(ideaProjectDirectory, 'runConfigurations'))
        mx.update_file(runConfigFile, runConfig.xml(indent='  ', newl='\n'))

        if java_modules:
            # Eclipse formatter config
            corePrefsSources = s.eclipse_settings_sources().get('org.eclipse.jdt.core.prefs')
            uiPrefsSources = s.eclipse_settings_sources().get('org.eclipse.jdt.ui.prefs')
            if corePrefsSources:
                miscXml = mx.XMLDoc()
                miscXml.open('project', attributes={'version' : '4'})
                out = StringIO()
                print('# GENERATED -- DO NOT EDIT', file=out)
                for source in corePrefsSources:
                    print('# Source:', source, file=out)
                    with open(source) as fileName:
                        for line in fileName:
                            if line.startswith('org.eclipse.jdt.core.formatter.'):
                                print(line.strip(), file=out)
                formatterConfigFile = join(ideaProjectDirectory, 'EclipseCodeFormatter.prefs')
                mx.update_file(formatterConfigFile, out.getvalue())
                importConfigFile = None
                if uiPrefsSources:
                    out = StringIO()
                    print('# GENERATED -- DO NOT EDIT', file=out)
                    for source in uiPrefsSources:
                        print('# Source:', source, file=out)
                        with open(source) as fileName:
                            for line in fileName:
                                if line.startswith('org.eclipse.jdt.ui.importorder') \
                                        or line.startswith('org.eclipse.jdt.ui.ondemandthreshold') \
                                        or line.startswith('org.eclipse.jdt.ui.staticondemandthreshold'):
                                    print(line.strip(), file=out)
                    importConfigFile = join(ideaProjectDirectory, 'EclipseImports.prefs')
                    mx.update_file(importConfigFile, out.getvalue())
                miscXml.open('component', attributes={'name' : 'EclipseCodeFormatterProjectSettings'})
                miscXml.open('option', attributes={'name' : 'projectSpecificProfile'})
                miscXml.open('ProjectSpecificProfile')
                miscXml.element('option', attributes={'name' : 'formatter', 'value' : 'ECLIPSE'})
                custom_eclipse_exe = mx.get_env('ECLIPSE_EXE')
                if custom_eclipse_exe:
                    custom_eclipse = dirname(custom_eclipse_exe)
                    if mx.is_darwin():
                        custom_eclipse = join(dirname(custom_eclipse), 'Eclipse')
                    if not exists(custom_eclipse_exe):
                        mx.abort('Custom eclipse "{}" does not exist'.format(custom_eclipse_exe))
                    miscXml.element('option', attributes={'name' : 'eclipseVersion', 'value' : 'CUSTOM'})
                    miscXml.element('option', attributes={'name' : 'pathToEclipse', 'value' : custom_eclipse})
                miscXml.element('option', attributes={'name' : 'pathToConfigFileJava', 'value' : '$PROJECT_DIR$/.idea/' + basename(formatterConfigFile)})
                if importConfigFile:
                    miscXml.element('option', attributes={'name' : 'importOrderConfigFilePath', 'value' : '$PROJECT_DIR$/.idea/' + basename(importConfigFile)})
                    miscXml.element('option', attributes={'name' : 'importOrderFromFile', 'value' : 'true'})

                miscXml.close('ProjectSpecificProfile')
                miscXml.close('option')
                miscXml.close('component')
                miscXml.close('project')
                miscFile = join(ideaProjectDirectory, 'eclipseCodeFormatter.xml')
                mx.update_file(miscFile, miscXml.xml(indent='  ', newl='\n'))

        if java_modules:
            # Write codestyle settings
            mx.ensure_dir_exists(join(ideaProjectDirectory, 'codeStyles'))

            codeStyleConfigXml = mx.XMLDoc()
            codeStyleConfigXml.open('component', attributes={'name': 'ProjectCodeStyleConfiguration'})
            codeStyleConfigXml.open('state')
            codeStyleConfigXml.element('option', attributes={'name': 'USE_PER_PROJECT_SETTINGS', 'value': 'true'})
            codeStyleConfigXml.close('state')
            codeStyleConfigXml.close('component')
            codeStyleConfigFile = join(ideaProjectDirectory, 'codeStyles', 'codeStyleConfig.xml')
            mx.update_file(codeStyleConfigFile, codeStyleConfigXml.xml(indent='  ', newl='\n'))

            codeStyleProjectXml = mx.XMLDoc()
            codeStyleProjectXml.open('component', attributes={'name': 'ProjectCodeStyleConfiguration'})
            codeStyleProjectXml.open('code_scheme', attributes={'name': 'Project', 'version': '173'})
            codeStyleProjectXml.open('JavaCodeStyleSettings')
            # We cannot entirely disable wildcard imports, but we can set the threshold to an absurdly high number.
            codeStyleProjectXml.element('option', attributes={'name': 'CLASS_COUNT_TO_USE_IMPORT_ON_DEMAND', 'value': '65536'})
            codeStyleProjectXml.element('option', attributes={'name': 'NAMES_COUNT_TO_USE_IMPORT_ON_DEMAND', 'value': '65536'})
            codeStyleProjectXml.close('JavaCodeStyleSettings')
            codeStyleProjectXml.close('code_scheme')
            codeStyleProjectXml.close('component')
            codeStyleProjectFile = join(ideaProjectDirectory, 'codeStyles', 'Project.xml')
            mx.update_file(codeStyleProjectFile, codeStyleProjectXml.xml(indent='  ', newl='\n'))

            # Write checkstyle-idea.xml for the CheckStyle-IDEA
            checkstyleXml = mx.XMLDoc()
            checkstyleXml.open('project', attributes={'version': '4'})
            checkstyleXml.open('component', attributes={'name': 'CheckStyle-IDEA'})
            checkstyleXml.open('option', attributes={'name' : "configuration"})
            checkstyleXml.open('map')

            if max_checkstyle_version:
                checkstyleXml.element('entry', attributes={'key': "checkstyle-version", 'value': str(max_checkstyle_version)})

            # Initialize an entry for each style that is used
            checkstyleConfigs = set([])
            for p in s.projects_recursive():
                if not p.isJavaProject():
                    continue
                csConfig, checkstyleVersion, checkstyleProj = p.get_checkstyle_config()
                if not csConfig or csConfig in checkstyleConfigs:
                    continue
                checkstyleConfigs.add(csConfig)
                checkstyleXml.element('entry', attributes={'key' : "location-" + str(len(checkstyleConfigs)), 'value': "PROJECT_RELATIVE:" + join(checkstyleProj.dir, ".checkstyle_checks.xml") + ":" + checkstyleProj.name})

            checkstyleXml.close('map')
            checkstyleXml.close('option')
            checkstyleXml.close('component')
            checkstyleXml.close('project')
            checkstyleFile = join(ideaProjectDirectory, 'checkstyle-idea.xml')
            mx.update_file(checkstyleFile, checkstyleXml.xml(indent='  ', newl='\n'))

            # mx integration
            def antTargetName(dist):
                return 'archive_' + dist.name

            def artifactFileName(dist):
                return dist.name.replace('.', '_').replace('-', '_') + '.xml'
            validDistributions = [dist for dist in mx.sorted_dists() if not dist.suite.isBinarySuite() and not dist.isTARDistribution()]

            # 1) Make an ant file for archiving the distributions.
            antXml = mx.XMLDoc()
            antXml.open('project', attributes={'name': s.name, 'default': 'archive'})
            for dist in validDistributions:
                antXml.open('target', attributes={'name': antTargetName(dist)})
                antXml.open('exec', attributes={'executable': sys.executable})
                antXml.element('arg', attributes={'value': join(mx._mx_home, 'mx.py')})
                antXml.element('arg', attributes={'value': 'archive'})
                antXml.element('arg', attributes={'value': '@' + dist.name})
                antXml.close('exec')
                antXml.close('target')

            antXml.close('project')
            antFile = join(ideaProjectDirectory, 'ant-mx-archive.xml')
            mx.update_file(antFile, antXml.xml(indent='  ', newl='\n'))

            # 2) Tell IDEA that there is an ant-build.
            ant_mx_archive_xml = 'file://$PROJECT_DIR$/.idea/ant-mx-archive.xml'
            metaAntXml = mx.XMLDoc()
            metaAntXml.open('project', attributes={'version': '4'})
            metaAntXml.open('component', attributes={'name': 'AntConfiguration'})
            metaAntXml.open('buildFile', attributes={'url': ant_mx_archive_xml})
            metaAntXml.close('buildFile')
            metaAntXml.close('component')
            metaAntXml.close('project')
            metaAntFile = join(ideaProjectDirectory, 'ant.xml')
            mx.update_file(metaAntFile, metaAntXml.xml(indent='  ', newl='\n'))

            # 3) Make an artifact for every distribution
            validArtifactNames = {artifactFileName(dist) for dist in validDistributions}
            artifactsDir = join(ideaProjectDirectory, 'artifacts')
            mx.ensure_dir_exists(artifactsDir)
            for fileName in os.listdir(artifactsDir):
                filePath = join(artifactsDir, fileName)
                if os.path.isfile(filePath) and fileName not in validArtifactNames:
                    os.remove(filePath)

            for dist in validDistributions:
                artifactXML = mx.XMLDoc()
                artifactXML.open('component', attributes={'name': 'ArtifactManager'})
                artifactXML.open('artifact', attributes={'build-on-make': 'true', 'name': dist.name})
                artifactXML.open('output-path', data='$PROJECT_DIR$/mxbuild/artifacts/' + dist.name)
                artifactXML.close('output-path')
                artifactXML.open('properties', attributes={'id': 'ant-postprocessing'})
                artifactXML.open('options', attributes={'enabled': 'true'})
                artifactXML.open('file', data=ant_mx_archive_xml)
                artifactXML.close('file')
                artifactXML.open('target', data=antTargetName(dist))
                artifactXML.close('target')
                artifactXML.close('options')
                artifactXML.close('properties')
                artifactXML.open('root', attributes={'id': 'root'})
                for javaProject in [dep for dep in dist.archived_deps() if dep.isJavaProject()]:
                    artifactXML.element('element', attributes={'id': 'module-output', 'name': javaProject.name})
                for javaProject in [dep for dep in dist.deps if dep.isLibrary() or dep.isDistribution()]:
                    artifactXML.element('element', attributes={'id': 'artifact', 'artifact-name': javaProject.name})
                artifactXML.close('root')
                artifactXML.close('artifact')
                artifactXML.close('component')

                artifactFile = join(artifactsDir, artifactFileName(dist))
                mx.update_file(artifactFile, artifactXML.xml(indent='  ', newl='\n'))

        def intellij_scm_name(vc_kind):
            if vc_kind == 'git':
                return 'Git'
            elif vc_kind == 'hg':
                return 'hg4idea'

        vcsXml = mx.XMLDoc()
        vcsXml.open('project', attributes={'version': '4'})
        vcsXml.open('component', attributes={'name': 'VcsDirectoryMappings'})

        suites_for_vcs = mx.suites() + ([mx._mx_suite] if mx_python_modules else [])
        sourceSuitesWithVCS = [vc_suite for vc_suite in suites_for_vcs if vc_suite.isSourceSuite() and vc_suite.vc is not None]
        uniqueSuitesVCS = {(vc_suite.vc_dir, vc_suite.vc.kind) for vc_suite in sourceSuitesWithVCS}
        for vcs_dir, kind in uniqueSuitesVCS:
            vcsXml.element('mapping', attributes={'directory': vcs_dir, 'vcs': intellij_scm_name(kind)})

        vcsXml.close('component')
        vcsXml.close('project')

        vcsFile = join(ideaProjectDirectory, 'vcs.xml')
        mx.update_file(vcsFile, vcsXml.xml(indent='  ', newl='\n'))
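The project-file generation above is driven entirely by mx's XMLDoc builder and mx.update_file. A minimal sketch of that open/element/close idiom, assuming only that mx.XMLDoc and mx.update_file behave as they are used in the code above:

import mx
from os.path import join

def write_minimal_vcs_xml(idea_dir, vc_dir):
    # Build a .idea/vcs.xml with the same open/element/close pattern as above.
    doc = mx.XMLDoc()
    doc.open('project', attributes={'version': '4'})
    doc.open('component', attributes={'name': 'VcsDirectoryMappings'})
    doc.element('mapping', attributes={'directory': vc_dir, 'vcs': 'Git'})
    doc.close('component')
    doc.close('project')
    # update_file only rewrites the file when the rendered content changed.
    return mx.update_file(join(idea_dir, 'vcs.xml'), doc.xml(indent='  ', newl='\n'))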
Example #53
0
def hsdis(args, copyToDir=None):
    """download the hsdis library

    This is needed to support HotSpot's assembly dumping features.
    By default it downloads the Intel syntax version; pass the 'att' argument to install the AT&T syntax version instead."""
    flavor = None
    if mx.get_arch() == "amd64":
        flavor = mx.get_env('HSDIS_SYNTAX')
        if flavor is None:
            flavor = 'intel'
        if 'att' in args:
            flavor = 'att'

    libpattern = mx.add_lib_suffix('hsdis-' + mx.get_arch() + '-' +
                                   mx.get_os() + '-%s')

    sha1s = {
        'att/hsdis-amd64-windows-%s.dll':
        'bcbd535a9568b5075ab41e96205e26a2bac64f72',
        'att/hsdis-amd64-linux-%s.so':
        '36a0b8e30fc370727920cc089f104bfb9cd508a0',
        'att/hsdis-amd64-darwin-%s.dylib':
        'c1865e9a58ca773fdc1c5eea0a4dfda213420ffb',
        'intel/hsdis-amd64-windows-%s.dll':
        '6a388372cdd5fe905c1a26ced614334e405d1f30',
        'intel/hsdis-amd64-linux-%s.so':
        '0d031013db9a80d6c88330c42c983fbfa7053193',
        'intel/hsdis-amd64-darwin-%s.dylib':
        '67f6d23cbebd8998450a88b5bef362171f66f11a',
        'hsdis-sparcv9-solaris-%s.so':
        '970640a9af0bd63641f9063c11275b371a59ee60',
        'hsdis-sparcv9-linux-%s.so':
        '0c375986d727651dee1819308fbbc0de4927d5d9',
    }

    if flavor:
        flavoredLib = flavor + "/" + libpattern
    else:
        flavoredLib = libpattern
    if flavoredLib not in sha1s:
        mx.warn(
            "hsdis with flavor '{}' not supported on this platform or architecture"
            .format(flavor))
        return

    sha1 = sha1s[flavoredLib]
    lib = flavoredLib % (sha1)
    path = join(_suite.get_output_root(), lib)
    if not exists(path):
        sha1path = path + '.sha1'
        mx.download_file_with_sha1(
            'hsdis',
            path, ['https://lafo.ssw.uni-linz.ac.at/pub/hsdis/' + lib],
            sha1,
            sha1path,
            True,
            True,
            sources=False)

    overwrite = True
    if copyToDir is None:
        # Try to install hsdis into JAVA_HOME
        overwrite = False
        base = mx.get_jdk().home
        if exists(join(base, 'jre')):
            copyToDir = join(base, 'jre', 'lib')
        else:
            copyToDir = join(base, 'lib')

    if exists(copyToDir):
        dest = join(copyToDir, mx.add_lib_suffix('hsdis-' + mx.get_arch()))
        if exists(dest) and not overwrite:
            import filecmp
            # Only issue warning if existing lib is different
            if not filecmp.cmp(path, dest):
                mx.warn('Not overwriting existing {} with {}'.format(
                    dest, path))
        else:
            try:
                shutil.copy(path, dest)
                mx.log('Copied {} to {}'.format(path, dest))
            except IOError as e:
                mx.warn('Could not copy {} to {}: {}'.format(
                    path, dest, str(e)))
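Commands such as hsdis become usable from the mx launcher once they are registered on the suite. A hedged sketch of that registration, assuming the usual mx.update_commands mapping of command name to [function, usage]; the usage string here is an assumption:

# Hypothetical registration; the real suite may register additional commands.
mx.update_commands(_suite, {
    'hsdis': [hsdis, '[att]'],
})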
Example #54
0
    def build(self):
        source_dir = self.subject.getSourceDir()
        output_dir = self.subject.getOutputDir()
        if not emcc_dir:
            mx.abort("No EMCC_DIR specified - the source programs will not be compiled to .wasm.")
        emcc_cmd = os.path.join(emcc_dir, "emcc")
        gcc_cmd = os.path.join(gcc_dir, "gcc")
        if mx.run([emcc_cmd, "-v"], nonZeroIsFatal=False) != 0:
            mx.abort("Could not check the emcc version.")
        if mx.run([gcc_cmd, "--version"], nonZeroIsFatal=False) != 0:
            mx.abort("Could not check the gcc version.")
        if not wabt_dir:
            mx.abort("Set WABT_DIR if you want the binary to include .wat files.")
        mx.log("Building files from the source dir: " + source_dir)
        cc_flags = ["-g2", "-O3"]
        include_flags = []
        if hasattr(self.project, "includeset"):
            include_flags = ["-I", os.path.join(_suite.dir, "includes", self.project.includeset)]
        emcc_flags = ["-s", "EXIT_RUNTIME=1", "-s", "STANDALONE_WASM", "-s", "WASM_BIGINT"] + cc_flags
        if self.project.isBenchmarkProject():
            emcc_flags = emcc_flags + ["-s", "EXPORTED_FUNCTIONS=" + str(self.benchmark_methods()).replace("'", "\"")]
        subdir_program_names = defaultdict(lambda: [])
        for root, filename in self.subject.getProgramSources():
            subdir = os.path.relpath(root, self.subject.getSourceDir())
            mx.ensure_dir_exists(os.path.join(output_dir, subdir))

            basename = remove_extension(filename)
            source_path = os.path.join(root, filename)
            output_wasm_path = os.path.join(output_dir, subdir, basename + ".wasm")
            output_js_path = os.path.join(output_dir, subdir, basename + ".js")
            timestampedSource = mx.TimeStampFile(source_path)
            timestampedOutput = mx.TimeStampFile(output_wasm_path)
            mustRebuild = timestampedSource.isNewerThan(timestampedOutput) or not timestampedOutput.exists()

            # Step 1: build the .wasm binary.
            if mustRebuild:
                if filename.endswith(".c"):
                    # This generates both a js file and a wasm file.
                    # See https://github.com/emscripten-core/emscripten/wiki/WebAssembly-Standalone
                    build_cmd_line = [emcc_cmd] + emcc_flags + [source_path, "-o", output_js_path] + include_flags
                    if mx.run(build_cmd_line, nonZeroIsFatal=False) != 0:
                        mx.abort("Could not build the wasm-only output of " + filename + " with emcc.")
                elif filename.endswith(".wat"):
                    # Step 1: compile the .wat file to .wasm.
                    wat2wasm_cmd = os.path.join(wabt_dir, "wat2wasm")
                    build_cmd_line = [wat2wasm_cmd, "-o", output_wasm_path, source_path]
                    if mx.run(build_cmd_line, nonZeroIsFatal=False) != 0:
                        mx.abort("Could not translate " + filename + " to binary format.")
                elif filename.endswith(".wasm"):
                    shutil.copyfile(source_path, output_wasm_path)

            else:
                mx.logv("skipping, file is up-to-date: " + source_path)

            # Step 2: copy the result file if it exists.
            result_path = os.path.join(root, basename + ".result")
            if os.path.isfile(result_path):
                result_output_path = os.path.join(output_dir, subdir, basename + ".result")
                shutil.copyfile(result_path, result_output_path)

            # Step 3: copy the opts file if it exists.
            opts_path = os.path.join(root, basename + ".opts")
            if os.path.isfile(opts_path):
                opts_output_path = os.path.join(output_dir, subdir, basename + ".opts")
                shutil.copyfile(opts_path, opts_output_path)

            output_wat_path = os.path.join(output_dir, subdir, basename + ".wat")
            if mustRebuild:
                if filename.endswith(".c"):
                    # Step 4: produce the .wat files, for easier debugging.
                    wasm2wat_cmd = os.path.join(wabt_dir, "wasm2wat")
                    if mx.run([wasm2wat_cmd, "-o", output_wat_path, output_wasm_path], nonZeroIsFatal=False) != 0:
                        mx.abort("Could not compile .wat file for " + filename)
                elif filename.endswith(".wat"):
                    # Step 4: copy the .wat file, for easier debugging.
                    wat_path = os.path.join(root, basename + ".wat")
                    shutil.copyfile(wat_path, output_wat_path)

            # Step 5: if this is a benchmark project, create native binaries too.
            if mustRebuild:
                if filename.endswith(".c"):
                    mx.ensure_dir_exists(os.path.join(output_dir, subdir, NATIVE_BENCH_DIR))
                    output_path = os.path.join(output_dir, subdir, NATIVE_BENCH_DIR, mx.exe_suffix(basename))
                    link_flags = ["-lm"]
                    gcc_cmd_line = [gcc_cmd] + cc_flags + [source_path, "-o", output_path] + include_flags + link_flags
                    if mx.run(gcc_cmd_line, nonZeroIsFatal=False) != 0:
                        mx.abort("Could not build the native binary of " + filename + ".")
                    os.chmod(output_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
                elif filename.endswith(".wat"):
                    mx.warn("The .wat files are not translated to native binaries: " + filename)

            # Remember the source name.
            subdir_program_names[subdir].append(basename)
        for subdir in subdir_program_names:
            with open(os.path.join(output_dir, subdir, "wasm_test_index"), "w") as f:
                for name in subdir_program_names[subdir]:
                    f.write(name)
                    f.write("\n")
Example #55
0
File: mx_fastr.py Project: iCodeIN/fastr
def build_binary_pkgs(args_in, **kwargs):
    '''
    Builds binary packages of components that we cache for build speed-up.
    See the CI scripts for details.
    '''
    parser = ArgumentParser()
    parser.add_argument(
        '--f2c-version',
        type=int,
        dest='f2c_version',
        required=True,
        help='Current version of f2c, the tarball will use this + 1')
    parser.add_argument(
        '--recommended-pkgs-version',
        default=0,
        type=int,
        dest='recommended_version',
        help=
        'Current version of recommended packages binary, the tarball will use this + 1'
    )
    parser.add_argument('--recommended-pkgs-list',
                        dest='recommended_list',
                        required=True,
                        help='Comma separated list of recommended packages')
    args = parser.parse_args(args_in)

    os_name = platform.system().lower()
    dest_dir = os.path.join(_fastr_suite.dir, 'binary-packages')
    shutil.rmtree(dest_dir, ignore_errors=True)
    mx.ensure_dir_exists(dest_dir)

    # F2C
    # creates binary-packages/f2c-binary-{version}-{osname}-amd64/f2c with contents of FASTR_HOME/f2c
    f2c_name = 'f2c-binary-' + str(args.f2c_version +
                                   1) + '-' + os_name + '-amd64'
    f2c_path = os.path.join(dest_dir, f2c_name)
    shutil.copytree(os.path.join(_fastr_suite.dir, 'f2c'),
                    os.path.join(f2c_path, 'f2c'))
    # creates the tarball
    result_tarball = os.path.join(dest_dir, f2c_name + '.tar.gz')
    with tarfile.open(result_tarball, "w:gz") as tar:
        tar.add(f2c_path, arcname=os.path.basename(f2c_path))
    mx.log("Binary package created at: " + result_tarball)

    # Recommended packages
    # creates binary-packages/fastr-recommended-pkgs-{version}-{osname}-amd64/fastr-recommended-pkgs
    pkgs_name = 'fastr-recommended-pkgs-' + str(args.recommended_version +
                                                1) + '-' + os_name + '-amd64'
    pkgs_path = os.path.join(dest_dir, pkgs_name)
    pkgs_pkgs_path = os.path.join(pkgs_path, 'pkgs')
    mx.ensure_dir_exists(pkgs_pkgs_path)
    for pkg_name in args.recommended_list.split(','):
        shutil.copytree(os.path.join(_fastr_suite.dir, 'library', pkg_name),
                        os.path.join(pkgs_pkgs_path, pkg_name))
    # add file with API digest
    try:
        with open(os.path.join(pkgs_path, 'api-checksum.txt'), 'w') as f:
            sys.stdout = f
            pkgcache(['--print-api-checksum', '--vm', 'fastr'])
    finally:
        sys.stdout = sys.__stdout__
    # creates the tarball
    result_tarball = os.path.join(dest_dir, pkgs_name + '.tar.gz')
    with tarfile.open(result_tarball, "w:gz") as tar:
        tar.add(pkgs_path, arcname=os.path.basename(pkgs_path))
    mx.log("Binary package created at: " + result_tarball)

    mx.log("Contents of the " + dest_dir + "directory: ")
    mx.run(['ls', '-R', dest_dir])
    return 0
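The api-checksum step above captures pkgcache output by temporarily rebinding sys.stdout. The standard-library contextlib.redirect_stdout achieves the same effect with less bookkeeping; a sketch that reuses pkgs_path and pkgcache from the function above and assumes pkgcache keeps writing its checksum to stdout:

import contextlib
import os

with open(os.path.join(pkgs_path, 'api-checksum.txt'), 'w') as f:
    with contextlib.redirect_stdout(f):
        # stdout is restored automatically when the block exits.
        pkgcache(['--print-api-checksum', '--vm', 'fastr'])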
Example #56
0
File: mx_downstream.py Project: mukel/mx
 def ignore_output_root(d, names):
     mx.log('Copying ' + d)
     if d == os.path.dirname(output_root):
         mx.log('Omitting ' + output_root)
         return [os.path.basename(output_root)]
     return []
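A callback with this (directory, names) signature is what shutil.copytree expects for its ignore argument, which is how a helper like ignore_output_root is typically wired up; output_root comes from the enclosing scope of the real helper. A sketch of such a call site, where the source and destination paths are assumptions:

import shutil

# Hypothetical call site: copy a suite checkout while skipping its build output.
shutil.copytree(suite_dir, clone_dir, ignore=ignore_output_root)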
Example #57
0
def run_shared_lib_test(args=None):
    mx.run_mx([
        "--dynamicimports", "/substratevm,/vm", "build",
        "--force-deprecation-as-warning", "--dependencies",
        "GRAAL_MANAGEMENT,POLYGLOT_NATIVE_API_HEADERS,libpolyglot.so.image"
    ],
              nonZeroIsFatal=True)
    vmdir = os.path.join(mx.suite("truffle").dir, "..", "vm")
    svm_lib_path = os.path.join(vmdir, "mxbuild",
                                "-".join([mx.get_os(),
                                          mx.get_arch()]),
                                "libpolyglot.so.image")
    fd = name = progname = None
    try:
        fd, name = tempfile.mkstemp(suffix='.c')
        os.write(
            fd, """
        #include "stdio.h"
        #include "polyglot_api.h"

        #define assert_ok(msg, f) { if (!(f)) { \\
             const poly_extended_error_info* error_info; \\
             poly_get_last_error_info(isolate_thread, &error_info); \\
             fprintf(stderr, "%s\\n", error_info->error_message); \\
             return fprintf(stderr, "%s\\n", msg); } } while (0)

        poly_isolate global_isolate;
        poly_thread isolate_thread;
        poly_engine engine;
        poly_context context;

        static poly_status create_context() {
            poly_status status;

            if (poly_attach_thread(global_isolate, &isolate_thread)) {
                return poly_generic_failure;
            }

            poly_engine_builder engine_builder;
            status = poly_create_engine_builder(isolate_thread, &engine_builder);
            if (status != poly_ok) {
                return status;
            }
            status = poly_engine_builder_build(isolate_thread, engine_builder, &engine);
            if (status != poly_ok) {
                return status;
            }
            poly_context_builder builder;
            status = poly_create_context_builder(isolate_thread, NULL, 0, &builder);
            if (status != poly_ok) {
                return status;
            }
            status = poly_context_builder_engine(isolate_thread, builder, engine);
            if (status != poly_ok) {
                return status;
            }
            status = poly_context_builder_option(isolate_thread, builder, "python.VerboseFlag", "true");
            if (status != poly_ok) {
                return status;
            }
            status = poly_context_builder_allow_io(isolate_thread, builder, true);
            if (status != poly_ok) {
                return status;
            }
            status = poly_context_builder_build(isolate_thread, builder, &context);
            if (status != poly_ok) {
                return status;
            }

            poly_destroy_handle(isolate_thread, engine_builder);
            poly_destroy_handle(isolate_thread, builder);

            return poly_ok;
        }

        static poly_status tear_down_context() {
            poly_status status = poly_context_close(isolate_thread, context, true);
            if (status != poly_ok) {
                return status;
            }

            status = poly_destroy_handle(isolate_thread, context);
            if (status != poly_ok) {
                return status;
            }

            status = poly_engine_close(isolate_thread, engine, true);
            if (status != poly_ok) {
                return status;
            }

            status = poly_destroy_handle(isolate_thread, engine);
            if (status != poly_ok) {
                return status;
            }

            if (poly_detach_thread(isolate_thread)) {
                return poly_ok;
            }

            return poly_ok;
        }

        static int test_basic_python_function() {
            assert_ok("Context creation failed.", create_context() == poly_ok);

            poly_value func;
            assert_ok("function eval failed", poly_context_eval(isolate_thread, context, "python", "test_func", "def test_func(x):\\n  return x * x\\ntest_func", &func) == poly_ok);
            int32_t arg_value = 42;
            poly_value primitive_object;
            assert_ok("create argument failed", poly_create_int32(isolate_thread, context, arg_value, &primitive_object) == poly_ok);
            poly_value arg[1] = {primitive_object};
            poly_value value;
            assert_ok("invocation was unsuccessful", poly_value_execute(isolate_thread, func, arg, 1, &value) == poly_ok);

            int32_t result_value;
            poly_value_as_int32(isolate_thread, value, &result_value);

            assert_ok("primitive free failed", poly_destroy_handle(isolate_thread, primitive_object) == poly_ok);
            assert_ok("value free failed", poly_destroy_handle(isolate_thread, value) == poly_ok);
            assert_ok("value computation was incorrect", result_value == 42 * 42);
            assert_ok("func free failed", poly_destroy_handle(isolate_thread, func) == poly_ok);
            assert_ok("Context tear down failed.", tear_down_context() == poly_ok);
            return 0;
        }

        int32_t main(int32_t argc, char **argv) {
            poly_isolate_params isolate_params = {};
            if (poly_create_isolate(&isolate_params, &global_isolate)) {
                return 1;
            }
            return test_basic_python_function();
        }
        """)
        os.close(fd)
        progname = os.path.join(SUITE.dir, "graalpython-embedded-tool")
        mx.log("".join([
            "Running ", "'clang",
            "-I%s" % svm_lib_path,
            "-L%s" % svm_lib_path, name, "-o", progname, "-lpolyglot"
        ]))
        mx.run([
            "clang",
            "-I%s" % svm_lib_path,
            "-L%s" % svm_lib_path, name,
            "-o%s" % progname, "-lpolyglot"
        ],
               nonZeroIsFatal=True)
        mx.log("Running " + progname + " with LD_LIBRARY_PATH " + svm_lib_path)
        mx.run(["ls", "-l", progname])
        mx.run(["ls", "-l", svm_lib_path])
        run_env = {
            "LD_LIBRARY_PATH": svm_lib_path,
            "GRAAL_PYTHONHOME": os.environ["GRAAL_PYTHONHOME"]
        }
        print(run_env)
        mx.run([progname], env=run_env)
    finally:
        try:
            os.unlink(progname)
        except:
            pass
        try:
            os.close(fd)
        except:
            pass
        try:
            os.unlink(name)
        except:
            pass
Example #58
0
def make_java_module(dist, jdk):
    """
    Creates a Java module from a distribution.

    :param JARDistribution dist: the distribution from which to create a module
    :param JDKConfig jdk: a JDK with a version >= 9 that can be used to compile the module-info class
    :return: the `JavaModuleDescriptor` for the created Java module
    """
    info = get_java_module_info(dist)
    if info is None:
        return None

    moduleName, moduleDir, moduleJar = info  # pylint: disable=unpacking-non-sequence
    mx.log('Building Java module ' + moduleName + ' from ' + dist.name)
    exports = {}
    requires = {}
    concealedRequires = {}
    uses = set()

    modulepath = list()
    usedModules = set()

    if dist.suite.getMxCompatibility().moduleDepsEqualDistDeps():
        moduledeps = dist.archived_deps()
        for dep in mx.classpath_entries(dist, includeSelf=False):
            jmd = make_java_module(dep,
                                   jdk) if dep.isJARDistribution() else None
            if jmd:
                modulepath.append(jmd)
                requires[jmd.name] = set(
                    [jdk.get_transitive_requires_keyword()])
            elif (dep.isJdkLibrary()
                  or dep.isJreLibrary()) and dep.is_provided_by(jdk):
                pass
            else:
                mx.abort(dist.name + ' cannot depend on ' + dep.name +
                         ' as it does not define a module')
    else:
        moduledeps = get_module_deps(dist)

    # Append JDK modules to module path
    jdkModules = jdk.get_modules()
    if not isinstance(jdkModules, list):
        jdkModules = list(jdkModules)
    allmodules = modulepath + jdkModules

    javaprojects = [d for d in moduledeps if d.isJavaProject()]

    # Collect packages in the module first
    packages = set()
    for dep in javaprojects:
        packages.update(dep.defined_java_packages())

    for dep in javaprojects:
        uses.update(getattr(dep, 'uses', []))
        for pkg in itertools.chain(
                dep.imported_java_packages(projectDepsOnly=False),
                getattr(dep, 'imports', [])):
            # Only consider packages not defined by the module we're creating. This handles the
            # case where we're creating a module that will upgrade an existing upgradeable
            # module in the JDK such as jdk.internal.vm.compiler.
            if pkg not in packages:
                depModule, visibility = lookup_package(allmodules, pkg,
                                                       moduleName)
                if depModule and depModule.name != moduleName:
                    requires.setdefault(depModule.name, set())
                    if visibility == 'exported':
                        # A distribution based module does not re-export its imported JDK packages
                        usedModules.add(depModule)
                    else:
                        assert visibility == 'concealed'
                        concealedRequires.setdefault(depModule.name,
                                                     set()).add(pkg)
                        usedModules.add(depModule)

        # If an "exports" attribute is not present, all packages are exported
        for package in _expand_package_info(
                dep, getattr(dep, 'exports', dep.defined_java_packages())):
            exports.setdefault(package, [])

    provides = {}
    if exists(moduleDir):
        shutil.rmtree(moduleDir)
    for d in [dist] + [md for md in moduledeps if md.isJARDistribution()]:
        if d.isJARDistribution():
            with zipfile.ZipFile(d.path, 'r') as zf:
                # To compile module-info.java, all classes it references must either be given
                # as Java source files or already exist as class files in the output directory.
                # As such, the jar file for each constituent distribution must be unpacked
                # in the output directory.
                zf.extractall(path=moduleDir)
                names = frozenset(zf.namelist())
                for arcname in names:
                    if arcname.startswith(
                            'META-INF/services/'
                    ) and not arcname == 'META-INF/services/':
                        service = arcname[len('META-INF/services/'):]
                        assert '/' not in service
                        provides.setdefault(service, set()).update(
                            zf.read(arcname).splitlines())
                        # Service types defined in the module are assumed to be used by the module
                        serviceClass = service.replace('.', '/') + '.class'
                        if serviceClass in names:
                            uses.add(service)

    jmd = JavaModuleDescriptor(moduleName,
                               exports,
                               requires,
                               uses,
                               provides,
                               packages=packages,
                               concealedRequires=concealedRequires,
                               jarpath=moduleJar,
                               dist=dist,
                               modulepath=modulepath)

    # Compile module-info.class
    moduleInfo = join(moduleDir, 'module-info.java')
    with open(moduleInfo, 'w') as fp:
        print(jmd.as_module_info(), file=fp)
    javacCmd = [jdk.javac, '-d', moduleDir]
    jdkModuleNames = [m.name for m in jdkModules]
    modulepathJars = [
        m.jarpath for m in jmd.modulepath
        if m.jarpath and m.name not in jdkModuleNames
    ]
    upgrademodulepathJars = [
        m.jarpath for m in jmd.modulepath
        if m.jarpath and m.name in jdkModuleNames
    ]
    if modulepathJars:
        javacCmd.append('--module-path')
        javacCmd.append(os.pathsep.join(modulepathJars))
    if upgrademodulepathJars:
        javacCmd.append('--upgrade-module-path')
        javacCmd.append(os.pathsep.join(upgrademodulepathJars))
    javacCmd.append(moduleInfo)
    mx.run(javacCmd)

    # Create the module jar
    shutil.make_archive(moduleJar, 'zip', moduleDir)
    os.rename(moduleJar + '.zip', moduleJar)
    jmd.save()
    return jmd
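A hedged sketch of driving make_java_module over a suite's JAR distributions, using only mx APIs already exercised in this listing (mx.sorted_dists, mx.get_jdk); the logging is illustrative:

jdk = mx.get_jdk()
for dist in mx.sorted_dists():
    # Only JAR distributions carry the metadata needed to build a module.
    if dist.isJARDistribution():
        jmd = make_java_module(dist, jdk)
        if jmd:
            mx.log('Created module ' + jmd.name + ' at ' + jmd.jarpath)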
Example #59
0
File: mx_gate.py Project: JornVernee/mx
def checkheaders(args):
    """check Java source headers against any required pattern"""
    mx.log('The checkheaders command is obsolete.  The checkstyle or checkcopyrights command performs\n'\
           'the required checks depending on the mx configuration.')
    return 0
Example #60
0
File: mx_gate.py Project: JornVernee/mx
 def stop(self):
     if Task.log:
         self.end = time.time()
         self.duration = datetime.timedelta(seconds=self.end - self.start)
         mx.log(self._timestamp(' END:   ') + self.title + ' [' + str(self.duration) + ']' + Task._diskstats())
     return self
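The stop method above measures wall-clock duration with time.time and datetime.timedelta. The same idiom outside the Task class, for timing any gate step; do_work is a placeholder and the log message is illustrative:

import datetime
import time

start = time.time()
do_work()  # placeholder for the timed step
duration = datetime.timedelta(seconds=time.time() - start)
mx.log('END: step [' + str(duration) + ']')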