def pullInstallDragonEgg(args=None):
    """Downloads and installs dragonegg.

    Assumes that compatible GCC and G++ versions are installed. On Darwin a
    matching GCC 4.6.4 is downloaded as well and the dragonegg Makefile is
    patched before building.

    :return: the exit code of the `make` invocation
    """
    toolDir = join(_toolDir, "tools/dragonegg")
    mx.ensure_dir_exists(toolDir)
    url = "https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/dragonegg-3.2.src.tar.gz"
    localPath = pullsuite(toolDir, [url])
    tar(localPath, toolDir)
    os.remove(localPath)
    if mx.get_os() == "darwin":
        gccToolDir = join(_toolDir, "tools/gcc")
        url = "https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/gcc-4.6.4.tar.gz"
        localPath = pullsuite(gccToolDir, [url])
        tar(localPath, gccToolDir)
        os.remove(localPath)
        # fix: build paths with join() instead of raw string concatenation so the
        # result is correct regardless of whether _toolDir has a trailing separator
        mx.run(["patch", "-p1", join(_toolDir, "tools/dragonegg/dragonegg-3.2.src/Makefile"), "mx.sulong/dragonegg-mac.patch"])
    # dragonegg's Makefile reads the compilers and llvm-config from the environment
    os.environ["GCC"] = getGCC()
    os.environ["CXX"] = getGPP()
    os.environ["CC"] = getGCC()
    pullLLVMBinaries()
    os.environ["LLVM_CONFIG"] = findLLVMProgram("llvm-config")
    # fix: py2-only `print` statement replaced with mx's logger (also py3-safe)
    mx.log(os.environ["LLVM_CONFIG"])
    compileCommand = ["make"]
    return mx.run(compileCommand, cwd=join(_toolDir, "tools/dragonegg/dragonegg-3.2.src"))
def get_java_module_info(dist, fatalIfNotModule=False):
    """
    Looks up the module metadata derived from `dist`.

    :param JARDistribution dist: a distribution possibly defining a module
    :param bool fatalIfNotModule: abort instead of returning None when `dist` defines no module
    :return: None when `dist` defines no module, otherwise a tuple of the module
             name, the staging directory for its class files (including
             module-info.class) and the path of the jar holding the built module
    """
    compat = dist.suite.getMxCompatibility()
    if compat.moduleDepsEqualDistDeps():
        # module name comes from an explicit attribute on the distribution
        moduleName = getattr(dist, 'moduleName', None)
        if not moduleName:
            if fatalIfNotModule:
                mx.abort('Distribution ' + dist.name + ' does not define a module')
            return None
        assert len(moduleName) > 0, '"moduleName" attribute of distribution ' + dist.name + ' cannot be empty'
    else:
        # legacy scheme: the module is derived from the distribution's module deps
        if not get_module_deps(dist):
            if fatalIfNotModule:
                mx.abort('Module for distribution ' + dist.name + ' would be empty')
            return None
        moduleName = dist.name.replace('_', '.').lower()
    stagingRoot = mx.ensure_dir_exists(join(dist.suite.get_output_root(), 'modules'))
    stagingDir = mx.ensure_dir_exists(join(stagingRoot, moduleName))
    builtJar = join(stagingRoot, moduleName + '.jar')
    return moduleName, stagingDir, builtJar
def testgraal(args):
    """Runs the graal-core gate against the current truffle sources as a sanity check.

    The truffle suite is copied into a scratch directory, graal-core is cloned
    (or pulled, if already present) next to it, and `mx gate` is run inside the
    cloned suite.

    :return: the exit code of the gate run
    """
    # the clone location can be overridden via the GRAAL_URL environment variable
    cloneFrom = mx.get_env("GRAAL_URL")
    if not cloneFrom:
        cloneFrom = "http://github.com/graalvm/graal-core"
    graalSuiteSubDir = mx.get_env("GRAAL_SUITE_SUBDIR")
    suite = mx.suite('truffle')
    suiteDir = suite.dir
    workDir = join(suite.get_output_root(), 'sanitycheck')
    mx.ensure_dir_exists(join(workDir, suite.name))
    # copy the suite into the scratch area, skipping its own output root
    for f in os.listdir(suiteDir):
        subDir = os.path.join(suiteDir, f)
        if subDir == suite.get_output_root():
            continue
        src = join(suiteDir, f)
        tgt = join(workDir, suite.name, f)
        if isdir(src):
            # replace any stale copy from a previous run
            if exists(tgt):
                shutil.rmtree(tgt)
            shutil.copytree(src, tgt)
        else:
            shutil.copy(src, tgt)
    sanityDir = join(workDir, 'sanity')
    git = mx.GitConfig()
    # reuse an existing clone when possible, otherwise clone fresh
    if exists(sanityDir):
        git.pull(sanityDir)
    else:
        git.clone(cloneFrom, sanityDir)
    sanitySuiteDir = sanityDir if graalSuiteSubDir is None else join(sanityDir, graalSuiteSubDir)
    return mx.run_mx(['--java-home=' + mx.get_jdk().home, 'gate', '-B--force-deprecation-as-warning', '--tags', 'build,test'], sanitySuiteDir)
def _update_JDK9_STUBS_library():
    """
    Sets the "path" and "sha1" attributes of the "JDK9_STUBS" library.
    """
    jdk9InternalLib = _suite.suiteDict['libraries']['JDK9_STUBS']
    jarInputDir = join(_suite.get_output_root(), 'jdk9-stubs')
    jarPath = join(_suite.get_output_root(), 'jdk9-stubs.jar')
    # minimal compile-time stubs for JDK9-internal API: (package, class name, source)
    stubs = [
        ('jdk.internal.misc', 'VM', """package jdk.internal.misc;
public class VM {
    public static String getSavedProperty(String key) {
        throw new InternalError("should not reach here");
    }
}
""")
    ]
    # the stubs jar is only (re)built when it does not exist yet
    if not exists(jarPath):
        sourceFiles = []
        for (package, className, source) in stubs:
            sourceFile = join(jarInputDir, package.replace('.', os.sep), className + '.java')
            mx.ensure_dir_exists(os.path.dirname(sourceFile))
            with open(sourceFile, 'w') as fp:
                fp.write(source)
            sourceFiles.append(sourceFile)
        jdk = mx.get_jdk(tag='default')
        mx.run([jdk.javac, '-d', jarInputDir] + sourceFiles)
        mx.run([jdk.jar, 'cf', jarPath, '.'], cwd=jarInputDir)
    jdk9InternalLib['path'] = jarPath
    jdk9InternalLib['sha1'] = mx.sha1OfFile(jarPath)
def pullLLVMSuite(args=None):
    """Fetches and unpacks the official (non Truffle) LLVM test suite."""
    mx.ensure_dir_exists(_llvmSuiteDir)
    archive = pullsuite(_llvmSuiteDir, ["https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/test-suite-3.2.src.tar.gz"])
    tar(archive, _llvmSuiteDir)
    os.remove(archive)
def pullNWCCSuite(args=None):
    """Fetches the NWCC test suite and extracts only its test directories."""
    mx.ensure_dir_exists(_nwccSuiteDir)
    archive = pullsuite(_nwccSuiteDir, ["https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/nwcc_0.8.3.tar.gz"])
    tar(archive, _nwccSuiteDir, ['nwcc_0.8.3/tests/', 'nwcc_0.8.3/test2/'], stripLevels=1)
    os.remove(archive)
def pullLLVMBinaries(args=None):
    """Downloads and extracts the LLVM 3.2 binaries for the current OS/arch.

    Unsupported platforms are reported and the function returns early; previously
    the unsupported branch fell through and raised an UnboundLocalError on `urls`.
    """
    toolDir = join(_toolDir, "tools/llvm")
    mx.ensure_dir_exists(toolDir)
    osStr = mx.get_os()
    arch = mx.get_arch()
    if osStr == 'windows':
        # fix: py2 `print` replaced with mx.log_error (consistent with the other variant)
        mx.log_error('windows currently only supported with cygwin!')
        return
    elif osStr == 'linux':
        if arch == 'amd64':
            urls = ['http://lafo.ssw.uni-linz.ac.at/sulong-deps/clang+llvm-3.2-x86_64-linux-ubuntu-12.04.tar.gz',
                    'http://llvm.org/releases/3.2/clang+llvm-3.2-x86_64-linux-ubuntu-12.04.tar.gz']
        else:
            urls = ['http://lafo.ssw.uni-linz.ac.at/sulong-deps/clang+llvm-3.2-x86-linux-ubuntu-12.04.tar.gz',
                    'http://llvm.org/releases/3.2/clang+llvm-3.2-x86-linux-ubuntu-12.04.tar.gz']
    elif osStr == 'darwin':
        urls = ['http://lafo.ssw.uni-linz.ac.at/sulong-deps/clang+llvm-3.2-x86_64-apple-darwin11.tar.gz',
                'http://llvm.org/releases/3.2/clang+llvm-3.2-x86_64-apple-darwin11.tar.gz']
    elif osStr == 'cygwin':
        urls = ['http://lafo.ssw.uni-linz.ac.at/sulong-deps/clang+llvm-3.2-x86-mingw32-EXPERIMENTAL.tar.gz',
                'http://llvm.org/releases/3.2/clang+llvm-3.2-x86-mingw32-EXPERIMENTAL.tar.gz']
    else:
        mx.log_error('{0} {1} not supported!'.format(osStr, arch))
        return  # bug fix: previously fell through with `urls` unbound
    localPath = pullsuite(toolDir, urls)
    tar(localPath, toolDir, stripLevels=1)
    os.remove(localPath)
def pullArgon2(args=None):
    """Fetches and unpacks the Argon2 sources."""
    mx.ensure_dir_exists(_argon2Dir)
    archive = pullsuite(_argon2Dir, ["https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/20160406.tar.gz"])
    tar(archive, _argon2Dir, ['phc-winner-argon2-20160406/'], stripLevels=1)
    os.remove(archive)
def _get_java_module_info(dist):
    """Derives (moduleName, moduleDir, moduleJar) for `dist`, creating the staging dirs."""
    assert len(get_module_deps(dist)) != 0
    stagingRoot = mx.ensure_dir_exists(join(dist.suite.get_output_root(), 'modules'))
    name = dist.name.replace('_', '.').lower()
    stagingDir = mx.ensure_dir_exists(join(stagingRoot, name))
    return name, stagingDir, join(stagingRoot, name + '.jar')
def pullLifetime(args=None):
    """Fetches and unpacks the lifetime-analysis reference outputs."""
    mx.ensure_dir_exists(_lifetimeReferenceDir)
    archive = pullsuite(_lifetimeReferenceDir, ["https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/lifetime-analysis-ref.tar.gz"])
    tar(archive, _lifetimeReferenceDir)
    os.remove(archive)
def jacocoreport(args):
    """create a JaCoCo coverage report

    Creates the report from the 'jacoco.exec' file in the current directory.
    Default output directory is 'coverage', but an alternative can be provided as an argument."""
    reportLib = mx.library("JACOCOREPORT", True)
    if len(args) > 1:
        mx.abort('jacocoreport takes only one argument : an output directory')
    out = args[0] if len(args) == 1 else 'coverage'
    # projects are included unless their 'jacoco' attribute says otherwise
    includes = list(_jacoco_includes)
    includes += [p.name for p in mx.projects() if getattr(p, 'jacoco', '') in ('include', '')]
    includedirs = set()
    for p in mx.projects():
        if getattr(p, 'jacoco', '') == 'exclude':
            continue
        if any(name in p.dir for name in includes):
            includedirs.add(p.dir)
    # the report tool expects each project's bin directory to exist
    for d in includedirs:
        mx.ensure_dir_exists(d + '/bin')
    mx.run_java(['-jar', reportLib.get_path(True), '--in', 'jacoco.exec', '--out', out] + sorted(includedirs))
def pullNWCCSuite(args=None):
    """Fetches the NWCC 0.8.3 test suite (primary mirror plus sourceforge fallback)."""
    mx.ensure_dir_exists(_nwccSuiteDir)
    mirrors = ["http://lafo.ssw.uni-linz.ac.at/sulong-deps/nwcc_0.8.3.tar.gz",
               "http://sourceforge.net/projects/nwcc/files/nwcc/nwcc%200.8.3/nwcc_0.8.3.tar.gz/download"]
    archive = pullsuite(_nwccSuiteDir, mirrors)
    tar(archive, _nwccSuiteDir, ['nwcc_0.8.3/tests/', 'nwcc_0.8.3/test2/'], stripLevels=1)
    os.remove(archive)
def pullGCCSuite(args=None):
    """Fetches the GCC sources; only gcc/testsuite is extracted."""
    target = _gccSuiteDir
    mx.ensure_dir_exists(target)
    archive = pullsuite(target, ["https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/gcc-5.2.0.tar.gz"])
    tar(archive, target, ['gcc-5.2.0/gcc/testsuite/'])
    os.remove(archive)
def pullBenchmarkGame(args=None):
    """Fetches the benchmarks-game sources and normalizes their file names."""
    mx.ensure_dir_exists(_benchGameSuiteDir)
    archive = pullsuite(_benchGameSuiteDir, ["https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/benchmarksgame-scm-latest.tar.gz"])
    tar(archive, _benchGameSuiteDir, ['benchmarksgame-2014-08-31/benchmarksgame/bench/'], stripLevels=3)
    os.remove(archive)
    renameBenchmarkFiles()
def pullTestSuite(library, destDir, **kwargs):
    """Resolves the given mx library and unpacks it into destDir.

    The downloaded archive and its .sha1 sidecar file are removed afterwards.
    """
    mx.ensure_dir_exists(destDir)
    archive = mx.library(library).get_path(True)
    mx_sulong.tar(archive, destDir, **kwargs)
    os.remove(archive)
    checksum = archive + '.sha1'
    if os.path.exists(checksum):
        os.remove(checksum)
def pullGCCSuite(args=None):
    """Fetches the GCC 5.2.0 test suite from one of several mirrors."""
    target = _gccSuiteDir
    mx.ensure_dir_exists(target)
    mirrors = ["http://lafo.ssw.uni-linz.ac.at/sulong-deps/gcc-5.2.0.tar.gz",
               "ftp://gd.tuwien.ac.at/gnu/gcc/releases/gcc-5.2.0/gcc-5.2.0.tar.gz",
               "ftp://ftp.fu-berlin.de/unix/languages/gcc/releases/gcc-5.2.0/gcc-5.2.0.tar.gz",
               "http://mirrors-usa.go-parts.com/gcc/releases/gcc-5.2.0/gcc-5.2.0.tar.gz"]
    archive = pullsuite(target, mirrors)
    tar(archive, target, ['gcc-5.2.0/gcc/testsuite/'])
    os.remove(archive)
def _sigtest_check(checktype, args, suite=None, projects=None):
    """run sigtest against Java projects with API

    For every project that has a snapshot.sigtest file, runs the SigTest
    signature checker against it and writes a JUnit-style XML result file into
    the project's output root. Aborts if any project's signatures changed.

    :param checktype: 'all' for a full check; any other value adds the '-b'
                      (backward-compatibility only) flag
    :return: 0 on success (exit via mx.abort on signature errors)
    """
    sigtestlib = mx.library('SIGTEST').get_path(resolve=True)
    nonTestProjects = [p for p in mx.projects() if _should_test_project(p)]
    if not nonTestProjects:
        return 1
    javaCompliance = max([p.javaCompliance for p in nonTestProjects])

    # accumulates the sigtest process output so it can be embedded in the XML report
    class OutputCapture:
        def __init__(self):
            self.data = ""
        def __call__(self, data):
            self.data += data
    failed = None
    for p in nonTestProjects:
        sigtestResults = p.dir + os.sep + 'snapshot.sigtest'
        # projects without a recorded snapshot are skipped
        if not os.path.exists(sigtestResults):
            continue
        jdk = mx.get_jdk(javaCompliance)
        cmd = ['-cp', mx._cygpathU2W(sigtestlib), 'com.sun.tdk.signaturetest.SignatureTest',
            '-Static', '-Mode', 'bin', '-FileName', sigtestResults,
            '-ClassPath', mx.classpath(p, jdk=jdk) + os.pathsep + jdk.bootclasspath(),
        ]
        if checktype != 'all':
            cmd.append('-b')
        for pkg in mx._find_packages(p):
            cmd = cmd + ['-PackageWithoutSubpackages', pkg]
        out = OutputCapture()
        print 'Checking ' + checktype + ' signature changes against ' + sigtestResults
        exitcode = mx.run_java(cmd, nonZeroIsFatal=False, jdk=mx.get_jdk(javaCompliance), out=out, err=out)
        mx.ensure_dir_exists(p.get_output_root())
        # write a JUnit-style report so CI can pick up the result
        with open(p.get_output_root() + os.path.sep + 'sigtest-junit.xml', 'w') as f:
            f.write('<?xml version="1.0" encoding="UTF-8" ?>\n')
            f.write('<testsuite tests="1" name="' + p.name + '.sigtest.' + checktype + '">\n')
            f.write('<testcase classname="' + p.name + '" name="sigtest.' + checktype + '">\n')
            # exit code 95 is SigTest's "passed" status; anything else is a failure
            if exitcode != 95:
                print out.data
                failed = sigtestResults
                f.write('<failure type="SignatureCheck"><![CDATA[\n')
                f.write(out.data)
                f.write(']]></failure>')
            else:
                f.write('<system-err><![CDATA[\n')
                f.write(out.data)
                f.write(']]></system-err>')
            f.write('</testcase>\n')
            f.write('</testsuite>\n')
    if failed:
        mx.abort('Signature error in ' + failed)
    else:
        print 'OK.'
    return 0
def pullInstallDragonEgg(args=None):
    """downloads and installs dragonegg (assumes that compatible GCC and G++ versions are installed)"""
    dragoneggDir = join(_toolDir, "tools/dragonegg")
    mx.ensure_dir_exists(dragoneggDir)
    archive = pullsuite(dragoneggDir, ['http://llvm.org/releases/3.2/dragonegg-3.2.src.tar.gz'])
    tar(archive, dragoneggDir)
    os.remove(archive)
    # dragonegg's Makefile reads the compilers and llvm-config from the environment
    os.environ['GCC'] = getGCC()
    os.environ['CXX'] = getGPP()
    os.environ['CC'] = getGCC()
    os.environ['LLVM_CONFIG'] = _toolDir + 'tools/llvm/bin/llvm-config'
    return mx.run(['make'], cwd=_toolDir + 'tools/dragonegg/dragonegg-3.2.src')
def pullInstallDragonEgg(args=None): """downloads and installs dragonegg (assumes that GCC 4.6 is on the path)""" if hasDragoneggGCCInstalled(): toolDir = join(_toolDir, "tools/dragonegg") mx.ensure_dir_exists(toolDir) url = 'http://llvm.org/releases/3.2/dragonegg-3.2.src.tar.gz' localPath = pullsuite(toolDir, [url]) tar(localPath, toolDir) os.remove(localPath) os.environ['GCC'] = 'gcc-4.6' os.environ['LLVM_CONFIG'] = _toolDir + 'tools/llvm/bin/llvm-config' compileCommand = ['make'] mx.run(compileCommand, cwd=_toolDir + 'tools/dragonegg/dragonegg-3.2.src') else: print 'could not find gcc-4.6, skip installing dragonegg!'
def testdownstream(args):
    """test downstream users of GraalCore

    Symlinks this suite into a scratch directory, clones (or updates) the
    downstream client repo next to it, checks out a matching branch when one
    exists, and runs the requested mx command in the client suite.

    :return: the exit code of the client `mx` run
    """
    parser = ArgumentParser(prog='mx testdownstream')
    parser.add_argument('--target', action='store', help='URL of client repo to clone', required=True, metavar='<url>')
    parser.add_argument('--suitedir', action='store', help='directory of target suite in client repo', default='.', metavar='<path>')
    parser.add_argument('-C', dest='clientMxCmd', action='append', help='arg to mx command run on client (e.g., -C-v -C--strict-compliance -Cgate)', default=[], metavar='<arg>')
    args = parser.parse_args(args)
    workDir = join(_suite.get_output_root(), 'testdownstream')
    # recreate a symlink mirror of this suite so the client sees a clean checkout
    mirror = join(workDir, _suite.name)
    if exists(mirror):
        shutil.rmtree(mirror)
    mx.ensure_dir_exists(mirror)
    for f in os.listdir(_suite.dir):
        subDir = join(_suite.dir, f)
        if subDir == _suite.get_output_root():
            continue
        src = join(_suite.dir, f)
        dst = join(mirror, f)
        mx.logv('[Creating symlink from {} to {}]'.format(dst, src))
        os.symlink(src, dst)
    # Deduce a target name from the target URL
    url = urlparse(args.target)
    targetName = url.path
    if targetName.rfind('/') != -1:
        targetName = targetName[targetName.rfind('/') + 1:]
    if targetName.endswith('.git'):
        targetName = targetName[0:-len('.git')]
    targetDir = join(workDir, targetName)
    git = mx.GitConfig()
    if exists(targetDir):
        git.pull(targetDir)
    else:
        git.clone(args.target, targetDir)
    # See if there's a matching (non-master) branch downstream and use it if there is
    branch = git.git_command(_suite.dir, ['rev-parse', '--abbrev-ref', 'HEAD']).strip()
    if branch != 'master':
        git.git_command(targetDir, ['checkout', branch], abortOnError=False)
    targetSuiteDir = join(targetDir, args.suitedir)
    cmd = ['--java-home=' + mx.get_jdk().home] + args.clientMxCmd
    mx.logv('[running "mx ' + ' '.join(cmd) + '" in ' + targetSuiteDir + ']')
    return mx.run_mx(cmd, targetSuiteDir)
def _fetch_test_suite(dest, library_names):
    """Extracts the given mx libraries into `dest` when any of them is newer than it."""
    def lib_path(name):
        return mx.library(name).get_path(resolve=True)
    # decide whether re-extraction is needed at all
    stale = False
    for name in library_names:
        if not exists(dest) or getmtime(lib_path(name)) > getmtime(dest):
            mx.logv('{} needs to be extracted'.format(name))
            stale = True
            break
    if not stale:
        return
    if exists(dest):
        mx.logv('Deleting the old test directory {}'.format(dest))
        shutil.rmtree(dest)
    mx.ensure_dir_exists(dest)
    for name in library_names:
        with tarfile.open(lib_path(name), 'r') as archive:
            archive.extractall(dest)
def pullInstallDragonEgg(args=None):
    """downloads and installs dragonegg (assumes that compatible GCC and G++ versions are installed)"""
    dragoneggDir = join(_toolDir, "tools/dragonegg")
    mx.ensure_dir_exists(dragoneggDir)
    archive = pullsuite(dragoneggDir, ['http://llvm.org/releases/3.2/dragonegg-3.2.src.tar.gz'])
    tar(archive, dragoneggDir)
    os.remove(archive)
    if mx.get_os() == 'darwin':
        # on macOS a matching GCC 4.6.4 is fetched and the dragonegg Makefile patched
        gccDir = join(_toolDir, "tools/gcc")
        gccArchive = pullsuite(gccDir, ['http://ftpmirror.gnu.org/gcc/gcc-4.6.4/gcc-4.6.4.tar.gz'])
        tar(gccArchive, gccDir)
        os.remove(gccArchive)
        mx.run(['patch', '-p1', _toolDir + 'tools/dragonegg/dragonegg-3.2.src/Makefile', 'mx.sulong/dragonegg-mac.patch'])
    # the dragonegg build reads the compilers and llvm-config from the environment
    os.environ['GCC'] = getGCC()
    os.environ['CXX'] = getGPP()
    os.environ['CC'] = getGCC()
    os.environ['LLVM_CONFIG'] = _toolDir + 'tools/llvm/bin/llvm-config'
    return mx.run(['make'], cwd=_toolDir + 'tools/dragonegg/dragonegg-3.2.src')
def _find_classes_by_annotated_methods(annotations, dists, jdk=None):
    """Finds classes in `dists` that contain methods carrying any of `annotations`.

    Cached scan results (one cache file per jar, keyed on the JDK) are reused
    when a primary suite provides a cache directory; only uncached jars are
    scanned by the FindClassesByAnnotatedMethods Java tool.

    :return: dict mapping a matching class name to the distribution defining it
    """
    if len(dists) == 0:
        return {}
    candidates = {}
    # Create map from jar file to the binary suite distribution defining it
    jarsToDists = {d.classpath_repr(): d for d in dists}
    primarySuite = mx.primary_suite()
    cachesDir = None
    jarsToParse = []
    if primarySuite and primarySuite != mx._mx_suite:
        cachesDir = mx.ensure_dir_exists(join(primarySuite.get_output_root(), 'unittest'))
        # NOTE(review): when there is no primary suite, jarsToParse stays empty and
        # no jars are scanned at all — confirm this is the intended behavior
        for d in dists:
            jar = d.classpath_repr()
            testclasses = _read_cached_testclasses(cachesDir, jar, jdk if jdk else mx.get_jdk())
            if testclasses is not None:
                # cache hit: reuse the recorded class names for this jar
                for classname in testclasses:
                    candidates[classname] = jarsToDists[jar]
            else:
                jarsToParse.append(jar)
    if jarsToParse:
        # Ensure Java support class is built
        mx.build(['--no-daemon', '--dependencies', 'com.oracle.mxtool.junit'])
        cp = mx.classpath(['com.oracle.mxtool.junit'] + list(jarsToDists.values()), jdk=jdk)
        out = mx.LinesOutputCapture()
        mx.run_java(['-cp', cp, 'com.oracle.mxtool.junit.FindClassesByAnnotatedMethods'] + annotations + jarsToParse, out=out, addDefaultArgs=False)
        # each output line is: <jar> <class>... where a '!' prefix marks excluded classes
        for line in out.lines:
            parts = line.split(' ')
            jar = parts[0]
            reportedclasses = parts[1:] if len(parts) > 1 else []
            testclasses = [c for c in reportedclasses if not c.startswith("!")]
            excludedclasses = [c for c in reportedclasses if c.startswith("!")]
            if cachesDir:
                _write_cached_testclasses(cachesDir, jar, jdk if jdk else mx.get_jdk(), testclasses, excludedclasses)
            for classname in testclasses:
                candidates[classname] = jarsToDists[jar]
    return candidates
def pullInstallDragonEgg(args=None):
    """downloads and installs dragonegg (assumes that compatible GCC and G++ versions are installed)"""
    dragoneggDir = join(_toolDir, "dragonegg")
    mx.ensure_dir_exists(dragoneggDir)
    archive = pullsuite(dragoneggDir, ['https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/dragonegg-3.2.src.tar.gz'])
    tar(archive, dragoneggDir)
    os.remove(archive)
    if mx.get_os() == 'darwin':
        # on macOS a matching GCC 4.6.4 is fetched and the dragonegg Makefile patched
        gccDir = join(_toolDir, "gcc")
        gccArchive = pullsuite(gccDir, ['https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/gcc-4.6.4.tar.gz'])
        tar(gccArchive, gccDir)
        os.remove(gccArchive)
        mx.run(['patch', '-p1', join(_toolDir, 'dragonegg', 'dragonegg-3.2.src', 'Makefile'), join('mx.sulong', 'dragonegg-mac.patch')])
    # the dragonegg build reads the compilers and llvm-config from the environment
    os.environ['GCC'] = getGCC()
    os.environ['CXX'] = getGPP()
    os.environ['CC'] = getGCC()
    pullLLVMBinaries()
    os.environ['LLVM_CONFIG'] = findLLVMProgramForDragonegg('llvm-config')
    return mx.run(['make'], cwd=join(_toolDir, 'dragonegg', 'dragonegg-3.2.src'))
def build(self):
    """Expands the bundled JS modules, optionally snapshots them, and generates node_snapshots.h."""
    outputDir = self.subject.output_dir()
    snapshotToolDistribution = 'graal-js:TRUFFLE_JS_SNAPSHOT_TOOL'
    pythonCmd = join(_suite.mxDir, 'python2/python')
    moduleSet = self.modulesToSnapshot()
    outputDirBin = join(outputDir, 'lib')
    mx.ensure_dir_exists(outputDirBin)
    macroFiles = []
    # Lttng is disabled by default on all platforms
    macroFiles.append('src/nolttng_macros.py')
    # performance counters are enabled by default only on Windows
    # bug fix: `is not 'windows'` compared identity with a string literal, which is
    # implementation-dependent; use `!=` (consistent with the other build variant)
    if _currentOs != 'windows':
        macroFiles.append('src/noperfctr_macros.py')
    # DTrace is disabled explicitly by the --without-dtrace option
    # ETW is enabled by default only on Windows
    if _currentOs != 'windows':
        macroFiles.append('src/notrace_macros.py')
    mx.run([pythonCmd, 'tools/expand-js-modules.py', outputDir] + [join('lib', m) for m in moduleSet] + macroFiles, cwd=_suite.dir)
    if not (hasattr(self.args, "jdt") and self.args.jdt and not self.args.force_javac):
        mx.run_java(['-cp', mx.classpath([snapshotToolDistribution]), mx.distribution(snapshotToolDistribution).mainClass,
                     '--binary', '--outdir=' + outputDirBin, '--indir=' + outputDirBin] + ['--file=' + m for m in moduleSet], cwd=outputDirBin)
    mx.run([pythonCmd, join(_suite.dir, 'tools/snapshot2c.py'), 'node_snapshots.h'] + [join('lib', m + '.bin') for m in moduleSet], cwd=outputDir)
def pullLLVMBinaries(args=None):
    """Downloads and extracts the LLVM 3.2 binaries for the current OS/arch.

    Unsupported platforms are reported and the function returns early; previously
    the unsupported branch fell through and raised an UnboundLocalError on `urls`.
    """
    toolDir = join(_toolDir, "tools/llvm")
    mx.ensure_dir_exists(toolDir)
    osStr = mx.get_os()
    arch = mx.get_arch()
    if osStr == "windows":
        # fix: py2 `print` replaced with mx.log_error (consistent with the other variant)
        mx.log_error("windows currently only supported with cygwin!")
        return
    elif osStr == "linux":
        if arch == "amd64":
            urls = ["https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/clang+llvm-3.2-x86_64-linux-ubuntu-12.04.tar.gz"]
        else:
            urls = ["https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/clang+llvm-3.2-x86-linux-ubuntu-12.04.tar.gz"]
    elif osStr == "darwin":
        urls = ["https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/clang+llvm-3.2-x86_64-apple-darwin11.tar.gz"]
    elif osStr == "cygwin":
        urls = ["https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/clang+llvm-3.2-x86-mingw32-EXPERIMENTAL.tar.gz"]
    else:
        mx.log_error("{0} {1} not supported!".format(osStr, arch))
        return  # bug fix: previously fell through with `urls` unbound
    localPath = pullsuite(toolDir, urls)
    tar(localPath, toolDir, stripLevels=1)
    os.remove(localPath)
def build(self):
    """Expands the bundled JS modules, optionally snapshots them, and generates node_snapshots.h."""
    outDir = self.subject.output_dir()
    snapshotTool = 'graal-js:TRUFFLE_JS_SNAPSHOT_TOOL'
    modules = self.modulesToSnapshot()
    libDir = join(outDir, 'lib')
    mx.ensure_dir_exists(libDir)
    macros = [join('tools', 'js2c_macros', 'check_macros.py')]
    # DTrace is disabled explicitly by the --without-dtrace option
    # ETW is enabled by default only on Windows
    if _currentOs != 'windows':
        macros.append(join('tools', 'js2c_macros', 'notrace_macros.py'))
    mx.run([python_cmd(), join('tools', 'expand-js-modules.py'), outDir]
           + [join('lib', m) for m in modules] + macros, cwd=_suite.dir)
    usingJdt = hasattr(self.args, "jdt") and self.args.jdt and not self.args.force_javac
    if not usingJdt:
        mx.run_java(['-cp', mx.classpath([snapshotTool]), mx.distribution(snapshotTool).mainClass,
                     '--binary', '--outdir=' + libDir, '--indir=' + libDir]
                    + ['--file=' + m for m in modules], cwd=libDir)
    mx.run([python_cmd(), join(_suite.dir, 'tools', 'snapshot2c.py'), 'node_snapshots.h']
           + [join('lib', m + '.bin') for m in modules], cwd=outDir)
def pullLLVMBinaries(args=None):
    """Downloads and extracts the LLVM 3.2 binaries for the current OS/arch.

    Unsupported platforms are reported and the function returns early; previously
    the unsupported branch fell through and raised an UnboundLocalError on `urls`.
    """
    toolDir = join(_toolDir, "llvm")
    mx.ensure_dir_exists(toolDir)
    osStr = mx.get_os()
    arch = mx.get_arch()
    if osStr == 'windows':
        mx.log_error('windows currently only supported with cygwin!')
        return
    elif osStr == 'linux':
        if arch == 'amd64':
            urls = ['https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/clang+llvm-3.2-x86_64-linux-ubuntu-12.04.tar.gz']
        else:
            urls = ['https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/clang+llvm-3.2-x86-linux-ubuntu-12.04.tar.gz']
    elif osStr == 'darwin':
        urls = ['https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/clang+llvm-3.2-x86_64-apple-darwin11.tar.gz']
    elif osStr == 'cygwin':
        urls = ['https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/clang+llvm-3.2-x86-mingw32-EXPERIMENTAL.tar.gz']
    else:
        mx.log_error("{0} {1} not supported!".format(osStr, arch))
        return  # bug fix: previously fell through with `urls` unbound
    localPath = pullsuite(toolDir, urls)
    tar(localPath, toolDir, stripLevels=1)
    os.remove(localPath)
def jacocoreport(args):
    """create a JaCoCo coverage report

    Creates the report from the 'jacoco.exec' file in the current directory.
    Default output directory is 'coverage', but an alternative can be provided as an argument."""
    lib = mx.library("JACOCOREPORT", True)
    out = 'coverage'
    if len(args) == 1:
        out = args[0]
    elif len(args) > 1:
        mx.abort('jacocoreport takes only one argument : an output directory')
    # names to include: the configured list plus projects not explicitly excluded
    wanted = list(_jacoco_includes)
    for proj in mx.projects():
        if getattr(proj, 'jacoco', '') in ('include', ''):
            wanted.append(proj.name)
    dirs = set(proj.dir for proj in mx.projects()
               if getattr(proj, 'jacoco', '') != 'exclude'
               and any(name in proj.dir for name in wanted))
    # the report tool expects each project's bin directory to exist
    for d in dirs:
        mx.ensure_dir_exists(d + '/bin')
    mx.run_java(['-jar', lib.get_path(True), '--in', 'jacoco.exec', '--out', out] + sorted(dirs))
def _ninja_deps(cls):  # pylint: disable=no-self-argument
    """Computes the mx library dependencies needed to run ninja.

    Registers the bundled NINJA binary when `ninja` is not already on PATH, and
    makes the bundled ninja_syntax module importable when it is not installed.

    :return: list of qualified mx library names to depend on
    """
    deps = []
    try:
        subprocess.check_output(['ninja', '--version'], stderr=subprocess.STDOUT)
    except OSError:
        # `ninja` is not on PATH (or not executable): fall back to the bundled library
        dep = mx.library('NINJA', False)
        if dep:
            deps.append(dep.qualifiedName())
            Ninja.binary = mx.join(dep.get_path(False), 'ninja')
        else:
            # necessary until GR-13214 is resolved
            mx.warn('Make `ninja` binary available via PATH to build native projects.')
    try:
        import ninja_syntax  # pylint: disable=unused-variable, unused-import
    except ImportError:
        # module not installed: put the bundled copy on sys.path
        dep = mx.library('NINJA_SYNTAX')
        deps.append(dep.qualifiedName())
        module_path = mx.join(dep.get_path(False), 'ninja_syntax-{}'.format(dep.version))
        mx.ensure_dir_exists(module_path)  # otherwise, import machinery will ignore it
        sys.path.append(module_path)
    return deps
def testgraal(args):
    """Copies the truffle suite into a scratch area, clones graal-core beside it and runs its gate."""
    # the clone location can be overridden via the GRAAL_URL environment variable
    cloneFrom = mx.get_env("GRAAL_URL") or "http://github.com/graalvm/graal-core"
    graalSuiteSubDir = mx.get_env("GRAAL_SUITE_SUBDIR")
    suite = mx.suite('truffle')
    workDir = join(suite.get_output_root(), 'sanitycheck')
    mirror = join(workDir, suite.name)
    mx.ensure_dir_exists(mirror)
    # copy the suite into the scratch area, skipping its own output root
    for entry in os.listdir(suite.dir):
        if os.path.join(suite.dir, entry) == suite.get_output_root():
            continue
        src = join(suite.dir, entry)
        dst = join(mirror, entry)
        if isdir(src):
            if exists(dst):
                shutil.rmtree(dst)
            shutil.copytree(src, dst)
        else:
            shutil.copy(src, dst)
    sanityDir = join(workDir, 'sanity')
    git = mx.GitConfig()
    if exists(sanityDir):
        git.pull(sanityDir)
    else:
        git.clone(cloneFrom, sanityDir)
    gateDir = sanityDir if graalSuiteSubDir is None else join(sanityDir, graalSuiteSubDir)
    return mx.run_mx(['-v', '--java-home=' + mx.get_jdk().home, 'gate', '-B--force-deprecation-as-warning', '--tags', 'build,test'], gateDir)
def _netbeansinit_suite(args, suite, refreshOnly=False, buildProcessorJars=True):
    """Generates NetBeans project configurations for all Java projects of `suite`.

    The generated files are zipped into netbeans-config.zip in the suite's mx
    output directory; nothing is regenerated when the existing configuration is
    still up to date.
    """
    mxOutputDir = mx.ensure_dir_exists(suite.get_mx_output_dir())
    configZip = mx.TimeStampFile(join(mxOutputDir, 'netbeans-config.zip'))
    configLibsZip = join(mxOutputDir, 'eclipse-config-libs.zip')
    # in refresh-only mode there is nothing to do unless a config already exists
    if refreshOnly and not configZip.exists():
        return
    if mx_ideconfig._check_ide_timestamp(suite, configZip, 'netbeans'):
        mx.logv('[NetBeans configurations are up to date - skipping]')
        return
    files = []
    libFiles = []
    jdks = set()
    for p in suite.projects:
        if not p.isJavaProject():
            continue
        # eclipse plugin projects are not handled by the NetBeans generator
        if exists(join(p.dir, 'plugin.xml')):
            continue
        includedInDists = [d for d in suite.dists if p in d.archived_deps()]
        _netbeansinit_project(p, jdks, files, libFiles, includedInDists)
    mx.log('If using NetBeans:')
    # http://stackoverflow.com/questions/24720665/cant-resolve-jdk-internal-package
    mx.log(' 1. Edit etc/netbeans.conf in your NetBeans installation and modify netbeans_default_options variable to include "-J-DCachingArchiveProvider.disableCtSym=true"')
    mx.log(' 2. Ensure that the following platform(s) are defined (Tools -> Java Platforms):')
    for jdk in jdks:
        mx.log('    JDK_' + str(jdk.version))
    mx.log(' 3. Open/create a Project Group for the directory containing the projects (File -> Project Group -> New Group... -> Folder of Projects)')
    mx_ideconfig._zip_files(files, suite.dir, configZip.path)
    mx_ideconfig._zip_files(libFiles, suite.dir, configLibsZip)
def _runmultimake(args):
    """run the JDK make process for one or more configurations

    Builds every requested (JVM variant, debug level) combination, either with
    output redirected to per-configuration log files (default) or to the
    console, and optionally verifies each build with `java -version`.
    """
    jvmVariantsDefault = ','.join(_jdkJvmVariants)
    debugLevelsDefault = ','.join(_jdkDebugLevels)
    parser = ArgumentParser(prog='mx multimake')
    parser.add_argument('--jdk-jvm-variants', '--vms', help='a comma separated list of VMs to build (default: ' + jvmVariantsDefault + ')', metavar='<args>', default=jvmVariantsDefault)
    parser.add_argument('--jdk-debug-levels', '--builds', help='a comma separated list of JDK debug levels (default: ' + debugLevelsDefault + ')', metavar='<args>', default=debugLevelsDefault)
    parser.add_argument('-n', '--no-check', action='store_true', help='omit running "java -version" after each build')
    select = parser.add_mutually_exclusive_group()
    select.add_argument('-c', '--console', action='store_true', help='send build output to console instead of log files')
    select.add_argument('-d', '--output-dir', help='directory for log files instead of current working directory', default=os.getcwd(), metavar='<dir>')
    args = parser.parse_args(args)
    jvmVariants = args.jdk_jvm_variants.split(',')
    debugLevels = [_translateLegacyDebugLevel(dl) for dl in args.jdk_debug_levels.split(',')]
    allStart = time.time()
    for jvmVariant in jvmVariants:
        for debugLevel in debugLevels:
            if not args.console:
                logFile = join(mx.ensure_dir_exists(args.output_dir), jvmVariant + '-' + debugLevel + '.log')
                start = time.time()
                mx.log('BEGIN: ' + jvmVariant + '-' + debugLevel + '\t(see: ' + logFile + ')')
                verbose = ['-v'] if mx.get_opts().verbose else []
                # Run as subprocess so that output can be directed to a file
                cmd = [sys.executable, '-u', mx.__file__] + verbose + ['--jdk-jvm-variant=' + jvmVariant, '--jdk-debug-level=' + debugLevel, 'make']
                mx.logv("executing command: " + str(cmd))
                # bug fix: the log file was opened with open() and never closed;
                # a context manager guarantees it is closed even on failure
                with open(logFile, 'wb') as log:
                    subprocess.check_call(cmd, cwd=_suite.dir, stdout=log, stderr=subprocess.STDOUT)
                duration = datetime.timedelta(seconds=time.time() - start)
                mx.log('END: ' + jvmVariant + '-' + debugLevel + '\t[' + str(duration) + ']')
            else:
                with VM(jvmVariant=jvmVariant, debugLevel=debugLevel):
                    _runmake([])
            if not args.no_check:
                with VM(jvmciMode='jit'):
                    run_vm(['-XX:-BootstrapJVMCI', '-version'])
    allDuration = datetime.timedelta(seconds=time.time() - allStart)
    mx.log('TOTAL TIME: ' + '[' + str(allDuration) + ']')
def _find_classes_by_annotated_methods(annotations, dists, jdk=None):
    """Finds classes in `dists` that contain methods carrying any of `annotations`.

    Cached scan results (one cache file per jar) are reused when a primary suite
    provides a cache directory; only uncached jars are scanned by the
    FindClassesByAnnotatedMethods Java tool.

    :return: dict mapping a matching class name to the distribution defining it
    """
    if len(dists) == 0:
        return {}
    candidates = {}
    # Create map from jar file to the binary suite distribution defining it
    jarsToDists = {d.classpath_repr(): d for d in dists}
    primarySuite = mx.primary_suite()
    cachesDir = None
    jarsToParse = []
    if primarySuite and primarySuite != mx._mx_suite:
        cachesDir = mx.ensure_dir_exists(join(primarySuite.get_output_root(), 'unittest'))
        # NOTE(review): when there is no primary suite, jarsToParse stays empty and
        # no jars are scanned at all — confirm this is the intended behavior
        for d in dists:
            jar = d.classpath_repr()
            testclasses = _read_cached_testclasses(cachesDir, jar)
            if testclasses is not None:
                # cache hit: reuse the recorded class names for this jar
                for classname in testclasses:
                    candidates[classname] = jarsToDists[jar]
            else:
                jarsToParse.append(jar)
    if jarsToParse:
        # Ensure Java support class is built
        mx.build(['--no-daemon', '--dependencies', 'com.oracle.mxtool.junit'])
        cp = mx.classpath(['com.oracle.mxtool.junit'] + jarsToDists.values(), jdk=jdk)
        out = mx.LinesOutputCapture()
        mx.run_java(['-cp', cp, 'com.oracle.mxtool.junit.FindClassesByAnnotatedMethods'] + annotations + jarsToParse, out=out, addDefaultArgs=False)
        # each output line is: <jar> <class>...
        for line in out.lines:
            parts = line.split(' ')
            jar = parts[0]
            testclasses = parts[1:] if len(parts) > 1 else []
            if cachesDir:
                _write_cached_testclasses(cachesDir, jar, testclasses)
            for classname in testclasses:
                candidates[classname] = jarsToDists[jar]
    return candidates
def _check_jvmci_version(jdk):
    """
    Runs a Java utility to check that `jdk` supports the minimum JVMCI API required by Graal.
    """
    simplename = 'JVMCIVersionCheck'
    name = 'org.graalvm.compiler.hotspot.' + simplename
    # per-JDK staging directory so checks against different JDKs don't collide
    binDir = mx.ensure_dir_exists(join(_suite.get_output_root(), '.jdk' + str(jdk.version)))
    if isinstance(_suite, mx.BinarySuite):
        # binary suites carry no checked-out sources: extract the checker's
        # source from the GRAAL_HOTSPOT sources jar on first use
        javaSource = join(binDir, simplename + '.java')
        if not exists(javaSource):
            dists = [d for d in _suite.dists if d.name == 'GRAAL_HOTSPOT']
            assert len(dists) == 1, 'could not find GRAAL_HOTSPOT distribution'
            d = dists[0]
            assert exists(d.sourcesPath), 'missing expected file: ' + d.sourcesPath
            with zipfile.ZipFile(d.sourcesPath, 'r') as zf:
                with open(javaSource, 'w') as fp:
                    fp.write(zf.read(name.replace('.', '/') + '.java'))
    else:
        javaSource = join(_suite.dir, 'src', 'org.graalvm.compiler.hotspot', 'src', name.replace('.', '/') + '.java')
    javaClass = join(binDir, name.replace('.', '/') + '.class')
    # recompile only when the class file is missing or stale
    if not exists(javaClass) or getmtime(javaClass) < getmtime(javaSource):
        mx.run([jdk.javac, '-d', binDir, javaSource])
    mx.run([jdk.java, '-cp', binDir, name])
def _check_jvmci_version(jdk):
    """
    Runs a Java utility to check that `jdk` supports the minimum JVMCI API required by Graal.
    """
    # NOTE(review): this redefines _check_jvmci_version — an earlier definition using the
    # org.graalvm.compiler package layout also appears in this file; only the later
    # binding is effective at runtime. Verify that keeping both is intentional.
    simplename = 'JVMCIVersionCheck'
    name = 'com.oracle.graal.hotspot.' + simplename
    binDir = mx.ensure_dir_exists(join(_suite.get_output_root(), '.jdk' + str(jdk.version)))
    if isinstance(_suite, mx.BinarySuite):
        # Binary suite: the checker source is not on disk; extract it from the
        # GRAAL_HOTSPOT sources jar.
        javaSource = join(binDir, simplename + '.java')
        if not exists(javaSource):
            dists = [d for d in _suite.dists if d.name == 'GRAAL_HOTSPOT']
            assert len(dists) == 1, 'could not find GRAAL_HOTSPOT distribution'
            d = dists[0]
            assert exists(d.sourcesPath), 'missing expected file: ' + d.sourcesPath
            with zipfile.ZipFile(d.sourcesPath, 'r') as zf:
                with open(javaSource, 'w') as fp:
                    fp.write(zf.read(name.replace('.', '/') + '.java'))
    else:
        # Source suite: read the checker directly from the source tree.
        javaSource = join(_suite.dir, 'graal', 'com.oracle.graal.hotspot', 'src', name.replace('.', '/') + '.java')
    javaClass = join(binDir, name.replace('.', '/') + '.class')
    # Recompile only when the compiled class is missing or older than its source.
    if not exists(javaClass) or getmtime(javaClass) < getmtime(javaSource):
        mx.run([jdk.javac, '-d', binDir, javaSource])
    mx.run([jdk.java, '-cp', binDir, name])
def _update_JVMCI_library():
    """
    Updates the "path" and "sha1" attributes of the "JVMCI" library to
    refer to a jvmci.jar created from the JVMCI classes in JDK9.
    """
    suiteDict = _suite.suiteDict
    jvmciLib = suiteDict['libraries']['JVMCI']
    # Stage the jar under the suite output root, keyed by the (root-stripped)
    # absolute JDK home path so different JDKs get distinct jars.
    d = join(_suite.get_output_root(), abspath(_jdk.home)[1:])
    path = join(d, 'jvmci.jar')

    explodedModule = join(_jdk.home, 'modules', 'jdk.vm.ci')
    if exists(explodedModule):
        # Exploded-modules JDK image: jar up the class files directly.
        jarInputs = {}
        newestJarInput = None
        for root, _, files in os.walk(explodedModule):
            relpath = root[len(explodedModule) + 1:]
            for f in files:
                arcname = join(relpath, f).replace(os.sep, '/')
                jarInput = join(root, f)
                jarInputs[arcname] = jarInput
                # Track the newest input to decide whether the jar is stale.
                t = mx.TimeStampFile(jarInput)
                if newestJarInput is None or t.isNewerThan(newestJarInput):
                    newestJarInput = t
        if not exists(path) or newestJarInput.isNewerThan(path):
            with mx.Archiver(path, kind='zip') as arc:
                for arcname, jarInput in jarInputs.iteritems():
                    with open(jarInput, 'rb') as fp:
                        contents = fp.read()
                        arc.zf.writestr(arcname, contents)
    else:
        # Use the jdk.internal.jimage utility since it's the only way
        # to partially read .jimage files as the JDK9 jimage tool
        # does not support partial extraction.
        bootmodules = join(_jdk.home, 'lib', 'modules', 'bootmodules.jimage')
        if not exists(bootmodules):
            mx.abort('Could not find JVMCI classes at ' + bootmodules + ' or ' + explodedModule)
        if not exists(path) or mx.TimeStampFile(bootmodules).isNewerThan(path):
            mx.ensure_dir_exists(d)
            # Generate, compile and run a throwaway Java helper that extracts the
            # jdk.vm.ci entries from the jimage into a jar.
            javaSource = join(d, 'ExtractJVMCI.java')
            with open(javaSource, 'w') as fp:
                print >> fp, """import java.io.FileOutputStream;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import jdk.internal.jimage.BasicImageReader;

public class ExtractJVMCI {
    public static void main(String[] args) throws Exception {
        BasicImageReader image = BasicImageReader.open(args[0]);
        String[] names = image.getEntryNames();
        if (names.length == 0) {
            return;
        }
        try (JarOutputStream jos = new JarOutputStream(new FileOutputStream(args[1]))) {
            for (String name : names) {
                if (name.startsWith("/jdk.vm.ci/")) {
                    String ename = name.substring("/jdk.vm.ci/".length());
                    JarEntry je = new JarEntry(ename);
                    jos.putNextEntry(je);
                    jos.write(image.getResource(name));
                    jos.closeEntry();
                }
            }
        }
    }
}
"""
            mx.run([_jdk.javac, '-d', d, javaSource])
            mx.run([_jdk.java, '-cp', d, 'ExtractJVMCI', bootmodules, path])
            if not exists(path):
                mx.abort('Could not find the JVMCI classes in ' + bootmodules)

    jvmciLib['path'] = path
    jvmciLib['sha1'] = mx.sha1OfFile(path)
def _eclipseinit(self, files=None, libFiles=None):
    """
    Generates an Eclipse CDT project for each HotSpot build configuration,
    one per (jvmVariant, debugLevel) pair.

    :param list files: if not None, generated project files are appended to it
    :param list libFiles: unused in this implementation; present for signature
           compatibility with other _eclipseinit implementations — TODO confirm
    """
    roots = ['cpu', 'os', 'os_cpu', 'share']
    for jvmVariant in _jdkJvmVariants:
        for debugLevel in _jdkDebugLevels:
            name = jvmVariant + '-' + debugLevel
            eclProjectDir = join(self.dir, 'eclipse', name)
            mx.ensure_dir_exists(eclProjectDir)

            # Emit the Eclipse .project descriptor.
            out = mx.XMLDoc()
            out.open('projectDescription')
            out.element('name', data='hotspot:' + name)
            out.element('comment', data='')
            out.element('projects', data='')
            out.open('buildSpec')
            out.open('buildCommand')
            out.element('name', data='org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder')
            out.element('triggers', data='full,incremental')
            out.element('arguments', data='')
            out.close('buildCommand')
            out.close('buildSpec')
            out.open('natures')
            out.element('nature', data='org.eclipse.cdt.core.cnature')
            out.element('nature', data='org.eclipse.cdt.core.ccnature')
            out.element('nature', data='org.eclipse.cdt.managedbuilder.core.managedBuildNature')
            out.element('nature', data='org.eclipse.cdt.managedbuilder.core.ScannerConfigNature')
            out.close('natures')

            if roots:
                # Link the HotSpot source roots and the generated-sources
                # directory into the Eclipse project.
                out.open('linkedResources')
                for r in roots:
                    f = join(_suite.dir, r)
                    out.open('link')
                    out.element('name', data=r)
                    # Eclipse link type: '2' = folder, '1' = file.
                    out.element('type', data='2' if isdir(f) else '1')
                    out.element('locationURI', data=mx.get_eclipse_project_rel_locationURI(f, eclProjectDir))
                    out.close('link')

                out.open('link')
                out.element('name', data='gensrc')
                out.element('type', data='2')
                generated = join(_get_hotspot_build_dir(jvmVariant, debugLevel), 'gensrc')
                out.element('locationURI', data=mx.get_eclipse_project_rel_locationURI(generated, eclProjectDir))
                out.close('link')

                out.close('linkedResources')
            out.close('projectDescription')
            projectFile = join(eclProjectDir, '.project')
            mx.update_file(projectFile, out.xml(indent='\t', newl='\n'))
            if files:
                files.append(projectFile)

            # The .cproject file is copied verbatim from a template.
            cprojectTemplate = join(self.dir, 'templates', 'eclipse', 'cproject')
            cprojectFile = join(eclProjectDir, '.cproject')
            with open(cprojectTemplate) as f:
                content = f.read()
            mx.update_file(cprojectFile, content)
            if files:
                files.append(cprojectFile)

            # Generate the .settings files from their declared sources, each
            # prefixed with a "generated" header naming its origin.
            settingsDir = join(eclProjectDir, ".settings")
            mx.ensure_dir_exists(settingsDir)
            for name, source in self._get_eclipse_settings_sources().iteritems():
                out = StringIO.StringIO()
                print >> out, '# GENERATED -- DO NOT EDIT'
                print >> out, '# Source:', source
                with open(source) as f:
                    print >> out, f.read()
                content = out.getvalue()
                mx.update_file(join(settingsDir, name), content)
                if files:
                    files.append(join(settingsDir, name))
def get_library_as_module(dep, jdk):
    """
    Converts a (modular or non-modular) jar library to a module descriptor.

    :param Library dep: a library dependency
    :param JDKConfig jdk: a JDK with a version >= 9 that can be used to describe the module
    :return: a module descriptor
    """
    assert dep.isLibrary()

    def is_valid_module_name(name):
        # Each dot-separated component must be a plain ASCII alphanumeric identifier.
        identRE = re.compile(r"^[A-Za-z][A-Za-z0-9]*$")
        return all(identRE.match(ident) for ident in name.split('.'))

    if hasattr(dep, 'moduleName'):
        moduleName = dep.moduleName
    else:
        # No explicit module name: derive the automatic module name from the jar
        # and memoize it on the dependency.
        moduleName = jdk.get_automatic_module_name(dep.path)
        if not is_valid_module_name(moduleName):
            mx.abort("Invalid identifier in automatic module name derived for library {}: {} (path: {})".format(dep.name, moduleName, dep.path))
        dep.moduleName = moduleName

    modulesDir = mx.ensure_dir_exists(join(mx.primary_suite().get_output_root(), 'modules'))
    cache = join(modulesDir, moduleName + '.desc')
    fullpath = dep.get_path(resolve=True)
    save = False
    # Re-describe the module when the cache is missing, older than the jar, or
    # older than this script (whose parsing logic the cache format depends on).
    if not exists(cache) or mx.TimeStampFile(fullpath).isNewerThan(cache) or mx.TimeStampFile(__file__).isNewerThan(cache):
        out = mx.LinesOutputCapture()
        rc = mx.run([jdk.java, '--module-path', fullpath, '--describe-module', moduleName], out=out, err=out, nonZeroIsFatal=False)
        lines = out.lines
        if rc != 0:
            mx.abort("java --describe-module {} failed. Please verify the moduleName attribute of {}.\n{}".format(moduleName, dep.name, "\n".join(lines)))
        save = True
    else:
        with open(cache) as fp:
            lines = fp.read().splitlines()

    # The first line of `java --describe-module` output names the module.
    assert lines and lines[0].startswith(moduleName), (dep.name, moduleName, lines)

    accepted_modifiers = set(['transitive'])
    requires = {}
    exports = {}
    provides = {}
    uses = set()
    packages = set()

    # Parse the remaining `--describe-module` lines into descriptor parts.
    for line in lines[1:]:
        parts = line.strip().split()
        assert len(parts) >= 2, '>>>' + line + '<<<'
        if parts[0:2] == ['qualified', 'exports']:
            # Treat a "qualified exports" line like a plain "exports" line.
            parts = parts[1:]
        a = parts[0]
        if a == 'requires':
            module = parts[1]
            modifiers = parts[2:]
            # Keep only modifiers we understand (currently just 'transitive').
            requires[module] = set(m for m in modifiers if m in accepted_modifiers)
        elif a == 'exports':
            source = parts[1]
            if len(parts) > 2:
                assert parts[2] == 'to'
                targets = parts[3:]
            else:
                targets = []
            exports[source] = targets
        elif a == 'uses':
            uses.update(parts[1:])
        elif a == 'contains':
            # Non-exported (concealed) packages.
            packages.update(parts[1:])
        elif a == 'provides':
            assert len(parts) >= 4 and parts[2] == 'with'
            service = parts[1]
            providers = parts[3:]
            provides.setdefault(service, []).extend(providers)
        else:
            mx.abort('Cannot parse module descriptor line: ' + str(parts))
    # Exported packages are packages of the module too.
    packages.update(exports.keys())

    if save:
        try:
            with open(cache, 'w') as fp:
                fp.write('\n'.join(lines) + '\n')
        except IOError as e:
            # Best-effort cache: warn and discard a partially written file.
            mx.warn('Error writing to ' + cache + ': ' + str(e))
            os.remove(cache)

    return JavaModuleDescriptor(moduleName, exports, requires, uses, provides, packages, jarpath=fullpath)
def build(self):
    """
    Compiles the project's source programs (.c via emcc, .wat via wabt's wat2wasm,
    .wasm copied as-is) into the output directory, copies per-program .result and
    .opts files, produces .wat disassemblies for debugging, builds native gcc
    binaries for benchmark projects, and finally writes a wasm_test_index file
    per output subdirectory listing the built programs.

    Requires the module-level emcc_dir, gcc_dir and wabt_dir locations to be set.
    """
    source_dir = self.subject.getSourceDir()
    output_dir = self.subject.getOutputDir()
    if not emcc_dir:
        mx.abort("No EMCC_DIR specified - the source programs will not be compiled to .wasm.")
    emcc_cmd = os.path.join(emcc_dir, "emcc")
    gcc_cmd = os.path.join(gcc_dir, "gcc")
    # Sanity-check that both toolchains are actually runnable before building.
    if mx.run([emcc_cmd, "-v"], nonZeroIsFatal=False) != 0:
        mx.abort("Could not check the emcc version.")
    if mx.run([gcc_cmd, "--version"], nonZeroIsFatal=False) != 0:
        mx.abort("Could not check the gcc version.")
    if not wabt_dir:
        mx.abort("Set WABT_DIR if you want the binary to include .wat files.")
    mx.log("Building files from the source dir: " + source_dir)
    cc_flags = ["-g2", "-O3"]
    include_flags = []
    if hasattr(self.project, "includeset"):
        include_flags = ["-I", os.path.join(_suite.dir, "includes", self.project.includeset)]
    emcc_flags = ["-s", "EXIT_RUNTIME=1", "-s", "STANDALONE_WASM", "-s", "WASM_BIGINT"] + cc_flags
    if self.project.isBenchmarkProject():
        # Export the benchmark entry points; emcc expects double-quoted names.
        emcc_flags = emcc_flags + ["-s", "EXPORTED_FUNCTIONS=" + str(self.benchmark_methods()).replace("'", "\"") + ""]
    subdir_program_names = defaultdict(lambda: [])
    for root, filename in self.subject.getProgramSources():
        if filename.startswith("_"):
            # Ignore files starting with an underscore
            continue

        subdir = os.path.relpath(root, self.subject.getSourceDir())
        mx.ensure_dir_exists(os.path.join(output_dir, subdir))

        basename = remove_extension(filename)
        source_path = os.path.join(root, filename)
        output_wasm_path = os.path.join(output_dir, subdir, basename + ".wasm")
        output_js_path = os.path.join(output_dir, subdir, basename + ".js")
        timestampedSource = mx.TimeStampFile(source_path)
        timestampedOutput = mx.TimeStampFile(output_wasm_path)
        mustRebuild = timestampedSource.isNewerThan(timestampedOutput) or not timestampedOutput.exists()

        # Step 1: build the .wasm binary.
        if mustRebuild:
            if filename.endswith(".c"):
                # This generates both a js file and a wasm file.
                # See https://github.com/emscripten-core/emscripten/wiki/WebAssembly-Standalone
                build_cmd_line = [emcc_cmd] + emcc_flags + [source_path, "-o", output_js_path] + include_flags
                if mx.run(build_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not build the wasm-only output of " + filename + " with emcc.")
            elif filename.endswith(".wat"):
                # Step 1: compile the .wat file to .wasm.
                wat2wasm_cmd = os.path.join(wabt_dir, "wat2wasm")
                build_cmd_line = [wat2wasm_cmd, "-o", output_wasm_path, source_path]
                if mx.run(build_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not translate " + filename + " to binary format.")
            elif filename.endswith(".wasm"):
                # Pre-built binaries are copied verbatim.
                shutil.copyfile(source_path, output_wasm_path)
        else:
            mx.logv("skipping, file is up-to-date: " + source_path)

        # Step 2: copy the result file if it exists.
        result_path = os.path.join(root, basename + ".result")
        if os.path.isfile(result_path):
            result_output_path = os.path.join(output_dir, subdir, basename + ".result")
            shutil.copyfile(result_path, result_output_path)

        # Step 3: copy the opts file if it exists.
        opts_path = os.path.join(root, basename + ".opts")
        if os.path.isfile(opts_path):
            opts_output_path = os.path.join(output_dir, subdir, basename + ".opts")
            shutil.copyfile(opts_path, opts_output_path)

        output_wat_path = os.path.join(output_dir, subdir, basename + ".wat")
        if mustRebuild:
            if filename.endswith(".c"):
                # Step 4: produce the .wat files, for easier debugging.
                wasm2wat_cmd = os.path.join(wabt_dir, "wasm2wat")
                if mx.run([wasm2wat_cmd, "-o", output_wat_path, output_wasm_path], nonZeroIsFatal=False) != 0:
                    mx.abort("Could not compile .wat file for " + filename)
            elif filename.endswith(".wat"):
                # Step 4: copy the .wat file, for easier debugging.
                wat_path = os.path.join(root, basename + ".wat")
                shutil.copyfile(wat_path, output_wat_path)

        # Step 5: if this is a benchmark project, create native binaries too.
        if mustRebuild:
            if filename.endswith(".c"):
                mx.ensure_dir_exists(os.path.join(output_dir, subdir, NATIVE_BENCH_DIR))
                output_path = os.path.join(output_dir, subdir, NATIVE_BENCH_DIR, mx.exe_suffix(basename))
                link_flags = ["-lm"]
                gcc_cmd_line = [gcc_cmd] + cc_flags + [source_path, "-o", output_path] + include_flags + link_flags
                if mx.run(gcc_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not build the native binary of " + filename + ".")
                # Owner read/write/execute only.
                os.chmod(output_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
            elif filename.endswith(".wat"):
                mx.warn("The .wat files are not translated to native binaries: " + filename)

        # Remember the source name.
        subdir_program_names[subdir].append(basename)

    # Write one index file per output subdirectory, listing its programs.
    for subdir in subdir_program_names:
        with open(os.path.join(output_dir, subdir, "wasm_test_index"), "w") as f:
            for name in subdir_program_names[subdir]:
                f.write(name)
                f.write("\n")
def testdownstream(suite, repoUrls, relTargetSuiteDir, mxCommands, branch=None):
    """
    Tests a downstream repo against the current working directory state of `suite`.

    :param mx.Suite suite: the suite to test against the downstream repo
    :param list repoUrls: URLs of downstream repos to clone, the first of which is the repo being tested
    :param str relTargetSuiteDir: directory of the downstream suite to test relative to the top level
           directory of the downstream repo being tested
    :param list mxCommands: argument lists for the mx commands run in downstream suite being tested
    :param str branch: name of branch to look for in downstream repo(s)
    """
    assert len(repoUrls) > 0
    workDir = join(suite.get_output_root(), 'testdownstream')

    # Rebuild a symlink mirror of the current suite so the downstream build
    # sees this working tree's state rather than a fresh checkout.
    mirror = join(workDir, suite.name)
    if exists(mirror):
        shutil.rmtree(mirror)
    mx.ensure_dir_exists(mirror)
    for entry in os.listdir(suite.dir):
        src = join(suite.dir, entry)
        if src == suite.get_output_root():
            # Never mirror the suite's own output root.
            continue
        dst = join(mirror, entry)
        mx.logv('[Creating symlink from {} to {}]'.format(dst, src))
        os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)

    targetDir = None
    for repoUrl in repoUrls:
        # Derive the checkout directory name from the last path segment of the URL.
        targetName = urlparse(repoUrl).path.rsplit('/', 1)[-1]
        if targetName.endswith('.git'):
            targetName = targetName[:-len('.git')]
        repoWorkDir = join(workDir, targetName)

        git = mx.GitConfig()
        if exists(repoWorkDir):
            git.pull(repoWorkDir)
        else:
            git.clone(repoUrl, repoWorkDir)

        # See if there's a matching (non-master) branch downstream and use it if there is.
        if not branch:
            branch = git.git_command(suite.dir, ['rev-parse', '--abbrev-ref', 'HEAD']).strip()
        if branch != 'master':
            # Best effort: the downstream repo may not have this branch.
            git.git_command(repoWorkDir, ['checkout', branch], abortOnError=False)

        # The first cloned repo is the one actually being tested.
        if targetDir is None:
            targetDir = repoWorkDir

    assert not isabs(relTargetSuiteDir)
    targetSuiteDir = join(targetDir, relTargetSuiteDir)
    assert targetSuiteDir.startswith(targetDir)

    # When testing mx itself, run the mirrored copy of mx.py.
    mxpy = join(mirror, 'mx.py') if suite == mx._mx_suite else None
    for command in mxCommands:
        mx.logv('[running "mx ' + ' '.join(command) + '" in ' + targetSuiteDir + ']')
        mx.run_mx(command, targetSuiteDir, mxpy=mxpy)
def build_binary_pkgs(args_in, **kwargs):
    '''
    Builds binary packages of components that we cache for build speed-up.
    See the CI scripts for details.

    Produces two tarballs under <suite>/binary-packages:
      * f2c-binary-{f2c_version+1}-{os}-amd64.tar.gz with the contents of FASTR_HOME/f2c
      * fastr-recommended-pkgs-{recommended_version+1}-{os}-amd64.tar.gz with the
        recommended packages plus an api-checksum.txt digest file

    :param list args_in: command line arguments (see the ArgumentParser below)
    :return: 0 on success
    '''
    parser = ArgumentParser()
    parser.add_argument('--f2c-version', type=int, dest='f2c_version', required=True,
                        help='Current version of f2c, the tarball will use this + 1')
    parser.add_argument('--recommended-pkgs-version', default=0, type=int, dest='recommended_version',
                        help='Current version of recommended packages binary, the tarball will use this + 1')
    parser.add_argument('--recommended-pkgs-list', dest='recommended_list', required=True,
                        help='Comma separated list of recommended packages')
    args = parser.parse_args(args_in)

    os_name = platform.system().lower()
    dest_dir = os.path.join(_fastr_suite.dir, 'binary-packages')
    # Start from a clean staging directory.
    shutil.rmtree(dest_dir, ignore_errors=True)
    mx.ensure_dir_exists(dest_dir)

    # F2C
    # creates binary-packages/f2c-binary-{version}-{osname}-amd64/f2c with contents of FASTR_HOME/f2c
    f2c_name = 'f2c-binary-' + str(args.f2c_version + 1) + '-' + os_name + '-amd64'
    f2c_path = os.path.join(dest_dir, f2c_name)
    shutil.copytree(os.path.join(_fastr_suite.dir, 'f2c'), os.path.join(f2c_path, 'f2c'))
    # creates the tarball
    result_tarball = os.path.join(dest_dir, f2c_name + '.tar.gz')
    with tarfile.open(result_tarball, "w:gz") as tar:
        tar.add(f2c_path, arcname=os.path.basename(f2c_path))
    mx.log("Binary package created at: " + result_tarball)

    # Recommended packages
    # creates binary-packages/fastr-recommended-pkgs-{version}-{osname}-amd64/fastr-recommended-pkgs
    pkgs_name = 'fastr-recommended-pkgs-' + str(args.recommended_version + 1) + '-' + os_name + '-amd64'
    pkgs_path = os.path.join(dest_dir, pkgs_name)
    pkgs_pkgs_path = os.path.join(pkgs_path, 'pkgs')
    mx.ensure_dir_exists(pkgs_pkgs_path)
    for pkg_name in args.recommended_list.split(','):
        shutil.copytree(os.path.join(_fastr_suite.dir, 'library', pkg_name),
                        os.path.join(pkgs_pkgs_path, pkg_name))

    # add file with API digest
    # pkgcache prints the checksum to stdout, so temporarily point stdout at the
    # digest file. Restore the *previous* stream (not sys.__stdout__) so this also
    # behaves correctly when a caller has already redirected stdout.
    prev_stdout = sys.stdout
    try:
        with open(os.path.join(pkgs_path, 'api-checksum.txt'), 'w') as f:
            sys.stdout = f
            pkgcache(['--print-api-checksum', '--vm', 'fastr'])
    finally:
        sys.stdout = prev_stdout

    # creates the tarball
    result_tarball = os.path.join(dest_dir, pkgs_name + '.tar.gz')
    with tarfile.open(result_tarball, "w:gz") as tar:
        tar.add(pkgs_path, arcname=os.path.basename(pkgs_path))
    mx.log("Binary package created at: " + result_tarball)

    mx.log("Contents of the " + dest_dir + " directory: ")
    mx.run(['ls', '-R', dest_dir])
    return 0
def _sigtest_check(checktype, args, suite=None, projects=None):
    """run sigtest against Java projects with API"""
    sigtestlib = mx.library('SIGTEST').get_path(resolve=True)
    nonTestProjects = [p for p in mx.projects() if _should_test_project(p)]
    if not nonTestProjects:
        # Nothing to check; non-zero signals "no API projects found".
        return 1
    javaCompliance = max([p.javaCompliance for p in nonTestProjects])

    # Minimal callable sink accumulating sigtest output for the JUnit report.
    class OutputCapture:
        def __init__(self):
            self.data = ""

        def __call__(self, data):
            self.data += data
    failed = None
    for p in nonTestProjects:
        sigtestResults = p.dir + os.sep + 'snapshot.sigtest'
        if not os.path.exists(sigtestResults):
            # Projects without a recorded snapshot are skipped.
            continue
        jdk = mx.get_jdk(javaCompliance)
        cmd = [
            '-cp', mx._cygpathU2W(sigtestlib), 'com.sun.tdk.signaturetest.SignatureTest',
            '-Static', '-Mode', 'bin', '-FileName', sigtestResults,
            '-ClassPath', mx.classpath(p, jdk=jdk) + os.pathsep + jdk.bootclasspath(),
        ]
        if checktype != 'all':
            # '-b' restricts the check to binary-compatibility changes only.
            cmd.append('-b')
        for pkg in mx._find_packages(p):
            cmd = cmd + ['-PackageWithoutSubpackages', pkg]
        out = OutputCapture()
        print 'Checking ' + checktype + ' signature changes against ' + sigtestResults
        exitcode = mx.run_java(cmd, nonZeroIsFatal=False, jdk=mx.get_jdk(javaCompliance), out=out, err=out)
        mx.ensure_dir_exists(p.get_output_root())
        # Emit a JUnit-style XML report with the captured sigtest output so CI
        # can surface the result per project.
        with open(p.get_output_root() + os.path.sep + 'sigtest-junit.xml', 'w') as f:
            f.write('<?xml version="1.0" encoding="UTF-8" ?>\n')
            f.write('<testsuite tests="1" name="' + p.name + '.sigtest.' + checktype + '">\n')
            f.write('<testcase classname="' + p.name + '" name="sigtest.' + checktype + '">\n')
            # Exit code 95 is treated as "passed" here (the JCK/SigTest status
            # convention) — anything else is recorded as a failure.
            if exitcode != 95:
                print out.data
                failed = sigtestResults
                f.write('<failure type="SignatureCheck"><![CDATA[\n')
                f.write(out.data)
                f.write(']]></failure>')
            else:
                f.write('<system-err><![CDATA[\n')
                f.write(out.data)
                f.write(']]></system-err>')
            f.write('</testcase>\n')
            f.write('</testsuite>\n')
    if failed:
        mx.abort('Signature error in ' + failed)
    else:
        print 'OK.'
    return 0
def _intellij_suite(args, s, declared_modules, referenced_modules, sdks, refreshOnly=False, mx_python_modules=False, generate_external_projects=True, java_modules=True, module_files_only=False, generate_native_projects=False): libraries = set() jdk_libraries = set() project_dir = s.dir ideaProjectDirectory = join(project_dir, '.idea') modulesXml = mx.XMLDoc() if not module_files_only and not s.isBinarySuite(): mx.ensure_dir_exists(ideaProjectDirectory) nameFile = join(ideaProjectDirectory, '.name') mx.update_file(nameFile, s.name) modulesXml.open('project', attributes={'version': '4'}) modulesXml.open('component', attributes={'name': 'ProjectModuleManager'}) modulesXml.open('modules') def _intellij_exclude_if_exists(xml, p, name, output=False): root = p.get_output_root() if output else p.dir path = join(root, name) if exists(path): excludeRoot = p.get_output_root() if output else '$MODULE_DIR$' excludePath = join(excludeRoot, name) xml.element('excludeFolder', attributes={'url':'file://' + excludePath}) annotationProcessorProfiles = {} def _complianceToIntellijLanguageLevel(compliance): # they changed the name format starting with JDK_10 if compliance.value >= 10: # Lastest Idea 2018.2 only understands JDK_11 so clamp at that value return 'JDK_' + str(min(compliance.value, 11)) return 'JDK_1_' + str(compliance.value) def _intellij_external_project(externalProjects, sdks, host): if externalProjects: for project_name, project_definition in externalProjects.items(): if not project_definition.get('path', None): mx.abort("external project {} is missing path attribute".format(project_name)) if not project_definition.get('type', None): mx.abort("external project {} is missing type attribute".format(project_name)) supported = ['path', 'type', 'source', 'test', 'excluded', 'load_path'] unknown = set(project_definition.keys()) - frozenset(supported) if unknown: mx.abort("There are unsupported {} keys in {} external project".format(unknown, project_name)) path = 
os.path.realpath(join(host.dir, project_definition["path"])) module_type = project_definition["type"] moduleXml = mx.XMLDoc() moduleXml.open('module', attributes={'type': {'ruby': 'RUBY_MODULE', 'python': 'PYTHON_MODULE', 'web': 'WEB_MODULE'}.get(module_type, 'UKNOWN_MODULE'), 'version': '4'}) moduleXml.open('component', attributes={'name': 'NewModuleRootManager', 'inherit-compiler-output': 'true'}) moduleXml.element('exclude-output') moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$'}) for name in project_definition.get('source', []): moduleXml.element('sourceFolder', attributes={'url':'file://$MODULE_DIR$/' + name, 'isTestSource': str(False)}) for name in project_definition.get('test', []): moduleXml.element('sourceFolder', attributes={'url':'file://$MODULE_DIR$/' + name, 'isTestSource': str(True)}) for name in project_definition.get('excluded', []): _intellij_exclude_if_exists(moduleXml, type('', (object,), {"dir": path})(), name) moduleXml.close('content') if module_type == "ruby": moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_ruby_sdk_type, 'jdkName': intellij_get_ruby_sdk_name(sdks)}) elif module_type == "python": moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_python_sdk_type, 'jdkName': intellij_get_python_sdk_name(sdks)}) elif module_type == "web": # nothing to do pass else: mx.abort("External project type {} not supported".format(module_type)) moduleXml.element('orderEntry', attributes={'type': 'sourceFolder', 'forTests': 'false'}) moduleXml.close('component') load_paths = project_definition.get('load_path', []) if load_paths: if not module_type == "ruby": mx.abort("load_path is supported only for ruby type external project") moduleXml.open('component', attributes={'name': 'RModuleSettingsStorage'}) load_paths_attributes = {} load_paths_attributes['number'] = str(len(load_paths)) for i, name in enumerate(load_paths): load_paths_attributes["string" + str(i)] = "$MODULE_DIR$/" + 
name moduleXml.element('LOAD_PATH', load_paths_attributes) moduleXml.close('component') moduleXml.close('module') moduleFile = join(path, project_name + '.iml') mx.update_file(moduleFile, moduleXml.xml(indent=' ', newl='\n')) if not module_files_only: declared_modules.add(project_name) moduleFilePath = "$PROJECT_DIR$/" + os.path.relpath(moduleFile, s.dir) modulesXml.element('module', attributes={'fileurl': 'file://' + moduleFilePath, 'filepath': moduleFilePath}) if generate_external_projects: for p in s.projects_recursive() + mx._mx_suite.projects_recursive(): _intellij_external_project(getattr(p, 'externalProjects', None), sdks, p) max_checkstyle_version = None compilerXml = None if java_modules: if not module_files_only: compilerXml = mx.XMLDoc() compilerXml.open('project', attributes={'version': '4'}) # The IntelliJ parser seems to mishandle empty ADDITIONAL_OPTIONS_OVERRIDE elements # so only emit the section if there will be something in it. additionalOptionsOverrides = False assert not s.isBinarySuite() # create the modules (1 IntelliJ module = 1 mx project/distribution) for p in s.projects_recursive() + mx._mx_suite.projects_recursive(): if not p.isJavaProject(): continue jdk = mx.get_jdk(p.javaCompliance) assert jdk # Value of the $MODULE_DIR$ IntelliJ variable and parent directory of the .iml file. 
module_dir = mx.ensure_dir_exists(p.dir) processors = p.annotation_processors() if processors: annotationProcessorProfiles.setdefault((p.source_gen_dir_name(),) + tuple(processors), []).append(p) intellijLanguageLevel = _complianceToIntellijLanguageLevel(p.javaCompliance) moduleXml = mx.XMLDoc() moduleXml.open('module', attributes={'type': 'JAVA_MODULE', 'version': '4'}) moduleXml.open('component', attributes={'name': 'NewModuleRootManager', 'LANGUAGE_LEVEL': intellijLanguageLevel, 'inherit-compiler-output': 'false'}) moduleXml.element('output', attributes={'url': 'file://$MODULE_DIR$/' + os.path.relpath(p.output_dir(), module_dir)}) moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$'}) for src in p.srcDirs: srcDir = mx.ensure_dir_exists(join(p.dir, src)) moduleXml.element('sourceFolder', attributes={'url':'file://$MODULE_DIR$/' + os.path.relpath(srcDir, module_dir), 'isTestSource': str(p.is_test_project())}) for name in ['.externalToolBuilders', '.settings', 'nbproject']: _intellij_exclude_if_exists(moduleXml, p, name) moduleXml.close('content') if processors: moduleXml.open('content', attributes={'url': 'file://' + p.get_output_root()}) genDir = p.source_gen_dir() mx.ensure_dir_exists(genDir) moduleXml.element('sourceFolder', attributes={'url':'file://' + p.source_gen_dir(), 'isTestSource': str(p.is_test_project()), 'generated': 'true'}) for name in [basename(p.output_dir())]: _intellij_exclude_if_exists(moduleXml, p, name, output=True) moduleXml.close('content') moduleXml.element('orderEntry', attributes={'type': 'sourceFolder', 'forTests': 'false'}) proj = p dependencies_project_packages = set() def should_process_dep(dep, edge): if dep.isTARDistribution() or dep.isNativeProject() or dep.isArchivableProject() or dep.isResourceLibrary(): mx.logv("Ignoring dependency from {} to {}".format(proj.name, dep.name)) return False return True def process_dep(dep, edge): if dep is proj: return if dep.isLibrary() or dep.isJARDistribution() or 
dep.isMavenProject(): libraries.add(dep) moduleXml.element('orderEntry', attributes={'type': 'library', 'name': dep.name, 'level': 'project'}) elif dep.isJavaProject(): dependencies_project_packages.update(dep.defined_java_packages()) referenced_modules.add(dep.name) moduleXml.element('orderEntry', attributes={'type': 'module', 'module-name': dep.name}) elif dep.isJdkLibrary(): jdk_libraries.add(dep) if jdk.javaCompliance < dep.jdkStandardizedSince: moduleXml.element('orderEntry', attributes={'type': 'library', 'name': dep.name, 'level': 'project'}) else: mx.logv("{} skipping {} for {}".format(p, dep, jdk)) #pylint: disable=undefined-loop-variable elif dep.isJreLibrary(): pass elif dep.isClasspathDependency(): moduleXml.element('orderEntry', attributes={'type': 'library', 'name': dep.name, 'level': 'project'}) else: mx.abort("Dependency not supported: {0} ({1})".format(dep, dep.__class__.__name__)) p.walk_deps(preVisit=should_process_dep, visit=process_dep, ignoredEdges=[mx.DEP_EXCLUDED]) moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_java_sdk_type, 'jdkName': intellij_get_java_sdk_name(sdks, jdk)}) moduleXml.close('component') if compilerXml and jdk.javaCompliance >= '9': moduleDeps = p.get_concealed_imported_packages(jdk=jdk) if moduleDeps: exports = sorted([(m, pkgs) for m, pkgs in moduleDeps.items() if dependencies_project_packages.isdisjoint(pkgs)]) if exports: args = [] exported_modules = set() for m, pkgs in exports: args += ['--add-exports={}/{}=ALL-UNNAMED'.format(m, pkg) for pkg in pkgs] exported_modules.add(m) roots = set(jdk.get_root_modules()) observable_modules = jdk.get_modules() default_module_graph = mx_javamodules.get_transitive_closure(roots, observable_modules) module_graph = mx_javamodules.get_transitive_closure(roots | exported_modules, observable_modules) extra_modules = module_graph - default_module_graph if extra_modules: args.append('--add-modules=' + ','.join((m.name for m in extra_modules))) if not 
additionalOptionsOverrides: additionalOptionsOverrides = True compilerXml.open('component', {'name': 'JavacSettings'}) compilerXml.open('option', {'name': 'ADDITIONAL_OPTIONS_OVERRIDE'}) compilerXml.element('module', {'name': p.name, 'options': ' '.join(args)}) # Checkstyle csConfig, checkstyleVersion, checkstyleProj = p.get_checkstyle_config() if csConfig: max_checkstyle_version = max(max_checkstyle_version, mx.VersionSpec(checkstyleVersion)) if max_checkstyle_version else mx.VersionSpec(checkstyleVersion) moduleXml.open('component', attributes={'name': 'CheckStyle-IDEA-Module'}) moduleXml.open('option', attributes={'name': 'configuration'}) moduleXml.open('map') moduleXml.element('entry', attributes={'key': "checkstyle-version", 'value': checkstyleVersion}) moduleXml.element('entry', attributes={'key': "active-configuration", 'value': "PROJECT_RELATIVE:" + join(checkstyleProj.dir, ".checkstyle_checks.xml") + ":" + checkstyleProj.name}) moduleXml.close('map') moduleXml.close('option') moduleXml.close('component') moduleXml.close('module') moduleFile = join(module_dir, p.name + '.iml') mx.update_file(moduleFile, moduleXml.xml(indent=' ', newl='\n').rstrip()) if not module_files_only: declared_modules.add(p.name) moduleFilePath = "$PROJECT_DIR$/" + os.path.relpath(moduleFile, project_dir) modulesXml.element('module', attributes={'fileurl': 'file://' + moduleFilePath, 'filepath': moduleFilePath}) if additionalOptionsOverrides: compilerXml.close('option') compilerXml.close('component') if mx_python_modules: def _python_module(suite): """ Gets a tuple describing the IntelliJ module for the python sources of `suite`. The tuple consists of the module name, module directory and the name of the .iml in the module directory. 
""" name = basename(suite.mxDir) module_dir = suite.mxDir return name, mx.ensure_dir_exists(module_dir), name + '.iml' def _add_declared_module(suite): if not module_files_only: name, module_dir, iml_file = _python_module(suite) declared_modules.add(name) moduleFilePath = "$PROJECT_DIR$/" + os.path.relpath(join(module_dir, iml_file), project_dir) modulesXml.element('module', attributes={'fileurl': 'file://' + moduleFilePath, 'filepath': moduleFilePath}) # mx.<suite> python module: _, module_dir, iml_file = _python_module(s) moduleXml = mx.XMLDoc() moduleXml.open('module', attributes={'type': 'PYTHON_MODULE', 'version': '4'}) moduleXml.open('component', attributes={'name': 'NewModuleRootManager', 'inherit-compiler-output': 'true'}) moduleXml.element('exclude-output') if s.name == 'mx': # MX itself is special. Python sources are also in the parent folder. moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$/..'}) moduleXml.element('sourceFolder', attributes={'url': 'file://$MODULE_DIR$/..', 'isTestSource': 'false'}) else: moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$'}) moduleXml.element('sourceFolder', attributes={'url': 'file://$MODULE_DIR$/' + os.path.relpath(s.mxDir, module_dir), 'isTestSource': 'false'}) for d in os.listdir(s.mxDir): directory = join(s.mxDir, d) if isdir(directory) and mx.dir_contains_files_recursively(directory, r".*\.java"): moduleXml.element('excludeFolder', attributes={'url': 'file://$MODULE_DIR$/' + os.path.relpath(directory, module_dir)}) moduleXml.close('content') moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': intellij_python_sdk_type, 'jdkName': intellij_get_python_sdk_name(sdks)}) moduleXml.element('orderEntry', attributes={'type': 'sourceFolder', 'forTests': 'false'}) processed_suites = {s.name} def _mx_projects_suite(visited_suite, suite_import): if suite_import.name in processed_suites: return processed_suites.add(suite_import.name) dep_suite = mx.suite(suite_import.name) 
dep_module_name, _, _ = _python_module(dep_suite) moduleXml.element('orderEntry', attributes={'type': 'module', 'module-name': dep_module_name}) _add_declared_module(dep_suite) dep_suite.visit_imports(_mx_projects_suite) s.visit_imports(_mx_projects_suite) if s.name != 'mx': moduleXml.element('orderEntry', attributes={'type': 'module', 'module-name': 'mx.mx'}) moduleXml.close('component') moduleXml.close('module') moduleFile = join(module_dir, iml_file) mx.update_file(moduleFile, moduleXml.xml(indent=' ', newl='\n')) _add_declared_module(s) _add_declared_module(mx._mx_suite) if generate_native_projects: _intellij_native_projects(s, module_files_only, declared_modules, modulesXml) if generate_external_projects: _intellij_external_project(s.suiteDict.get('externalProjects', None), sdks, s) if not module_files_only: modulesXml.close('modules') modulesXml.close('component') modulesXml.close('project') moduleXmlFile = join(ideaProjectDirectory, 'modules.xml') mx.update_file(moduleXmlFile, modulesXml.xml(indent=' ', newl='\n')) if java_modules and not module_files_only: unique_library_file_names = set() librariesDirectory = mx.ensure_dir_exists(join(ideaProjectDirectory, 'libraries')) mx.ensure_dir_exists(librariesDirectory) def make_library(name, path, source_path, suite_dir): libraryXml = mx.XMLDoc() libraryXml.open('component', attributes={'name': 'libraryTable'}) libraryXml.open('library', attributes={'name': name}) libraryXml.open('CLASSES') pathX = mx.relpath_or_absolute(path, suite_dir, prefix='$PROJECT_DIR$') libraryXml.element('root', attributes={'url': 'jar://' + pathX + '!/'}) libraryXml.close('CLASSES') libraryXml.element('JAVADOC') if sourcePath: libraryXml.open('SOURCES') if os.path.isdir(sourcePath): sourcePathX = mx.relpath_or_absolute(sourcePath, suite_dir, prefix='$PROJECT_DIR$') libraryXml.element('root', attributes={'url': 'file://' + sourcePathX}) else: source_pathX = mx.relpath_or_absolute(source_path, suite_dir, prefix='$PROJECT_DIR$') 
libraryXml.element('root', attributes={'url': 'jar://' + source_pathX + '!/'}) libraryXml.close('SOURCES') else: libraryXml.element('SOURCES') libraryXml.close('library') libraryXml.close('component') libraryFile = join(librariesDirectory, _intellij_library_file_name(name, unique_library_file_names)) return mx.update_file(libraryFile, libraryXml.xml(indent=' ', newl='\n')) # Setup the libraries that were used above for library in libraries: sourcePath = None if library.isLibrary(): path = library.get_path(True) if library.sourcePath: sourcePath = library.get_source_path(True) elif library.isMavenProject(): path = library.get_path(True) sourcePath = library.get_source_path(True) elif library.isJARDistribution(): path = library.path if library.sourcesPath: sourcePath = library.sourcesPath elif library.isClasspathDependency(): path = library.classpath_repr() else: mx.abort('Dependency not supported: {} ({})'.format(library.name, library.__class__.__name__)) make_library(library.name, path, sourcePath, s.dir) jdk = mx.get_jdk() updated = False for library in jdk_libraries: if library.classpath_repr(jdk) is not None: if make_library(library.name, library.classpath_repr(jdk), library.get_source_path(jdk), s.dir): updated = True if jdk_libraries and updated: mx.log("Setting up JDK libraries using {0}".format(jdk)) # Set annotation processor profiles up, and link them to modules in compiler.xml compilerXml.open('component', attributes={'name': 'CompilerConfiguration'}) compilerXml.element('option', attributes={'name': "DEFAULT_COMPILER", 'value': 'Javac'}) # using the --release option with javac interferes with using --add-modules which is required for some projects compilerXml.element('option', attributes={'name': "USE_RELEASE_OPTION", 'value': 'false'}) compilerXml.element('resourceExtensions') compilerXml.open('wildcardResourcePatterns') compilerXml.element('entry', attributes={'name': '!?*.java'}) compilerXml.close('wildcardResourcePatterns') if 
annotationProcessorProfiles: compilerXml.open('annotationProcessing') for t, modules in sorted(annotationProcessorProfiles.items()): source_gen_dir = t[0] processors = t[1:] compilerXml.open('profile', attributes={'default': 'false', 'name': '-'.join([ap.name for ap in processors]) + "-" + source_gen_dir, 'enabled': 'true'}) compilerXml.element('sourceOutputDir', attributes={'name': join(os.pardir, source_gen_dir)}) compilerXml.element('sourceTestOutputDir', attributes={'name': join(os.pardir, source_gen_dir)}) compilerXml.open('processorPath', attributes={'useClasspath': 'false'}) # IntelliJ supports both directories and jars on the annotation processor path whereas # Eclipse only supports jars. for apDep in processors: def processApDep(dep, edge): if dep.isLibrary() or dep.isJARDistribution(): compilerXml.element('entry', attributes={'name': mx.relpath_or_absolute(dep.path, s.dir, prefix='$PROJECT_DIR$')}) elif dep.isProject(): compilerXml.element('entry', attributes={'name': mx.relpath_or_absolute(dep.output_dir(), s.dir, prefix='$PROJECT_DIR$')}) apDep.walk_deps(visit=processApDep) compilerXml.close('processorPath') for module in modules: compilerXml.element('module', attributes={'name': module.name}) compilerXml.close('profile') compilerXml.close('annotationProcessing') compilerXml.close('component') if compilerXml: compilerXml.close('project') compilerFile = join(ideaProjectDirectory, 'compiler.xml') mx.update_file(compilerFile, compilerXml.xml(indent=' ', newl='\n')) if not module_files_only: # Write misc.xml for global JDK config miscXml = mx.XMLDoc() miscXml.open('project', attributes={'version' : '4'}) if java_modules: mainJdk = mx.get_jdk() miscXml.open('component', attributes={'name' : 'ProjectRootManager', 'version': '2', 'languageLevel': _complianceToIntellijLanguageLevel(mainJdk.javaCompliance), 'project-jdk-name': intellij_get_java_sdk_name(sdks, mainJdk), 'project-jdk-type': intellij_java_sdk_type}) miscXml.element('output', attributes={'url' : 
'file://$PROJECT_DIR$/' + os.path.relpath(s.get_output_root(), s.dir)}) miscXml.close('component') else: miscXml.element('component', attributes={'name' : 'ProjectRootManager', 'version': '2', 'project-jdk-name': intellij_get_python_sdk_name(sdks), 'project-jdk-type': intellij_python_sdk_type}) miscXml.close('project') miscFile = join(ideaProjectDirectory, 'misc.xml') mx.update_file(miscFile, miscXml.xml(indent=' ', newl='\n')) # Generate a default configuration for debugging Graal runConfig = mx.XMLDoc() runConfig.open('component', attributes={'name' : 'ProjectRunConfigurationManager'}) runConfig.open('configuration', attributes={'default' :'false', 'name' : 'GraalDebug', 'type' : 'Remote', 'factoryName': 'Remote'}) runConfig.element('option', attributes={'name' : 'USE_SOCKET_TRANSPORT', 'value' : 'true'}) runConfig.element('option', attributes={'name' : 'SERVER_MODE', 'value' : 'false'}) runConfig.element('option', attributes={'name' : 'SHMEM_ADDRESS', 'value' : 'javadebug'}) runConfig.element('option', attributes={'name' : 'HOST', 'value' : 'localhost'}) runConfig.element('option', attributes={'name' : 'PORT', 'value' : '8000'}) runConfig.open('RunnerSettings', attributes={'RunnerId' : 'Debug'}) runConfig.element('option', attributes={'name' : 'DEBUG_PORT', 'value' : '8000'}) runConfig.element('option', attributes={'name' : 'LOCAL', 'value' : 'false'}) runConfig.close('RunnerSettings') runConfig.element('method') runConfig.close('configuration') runConfig.close('component') runConfigFile = join(ideaProjectDirectory, 'runConfigurations', 'GraalDebug.xml') mx.ensure_dir_exists(join(ideaProjectDirectory, 'runConfigurations')) mx.update_file(runConfigFile, runConfig.xml(indent=' ', newl='\n')) if java_modules: # Eclipse formatter config corePrefsSources = s.eclipse_settings_sources().get('org.eclipse.jdt.core.prefs') uiPrefsSources = s.eclipse_settings_sources().get('org.eclipse.jdt.ui.prefs') if corePrefsSources: miscXml = mx.XMLDoc() miscXml.open('project', 
attributes={'version' : '4'}) out = StringIO() print('# GENERATED -- DO NOT EDIT', file=out) for source in corePrefsSources: print('# Source:', source, file=out) with open(source) as fileName: for line in fileName: if line.startswith('org.eclipse.jdt.core.formatter.'): print(line.strip(), file=out) formatterConfigFile = join(ideaProjectDirectory, 'EclipseCodeFormatter.prefs') mx.update_file(formatterConfigFile, out.getvalue()) importConfigFile = None if uiPrefsSources: out = StringIO() print('# GENERATED -- DO NOT EDIT', file=out) for source in uiPrefsSources: print('# Source:', source, file=out) with open(source) as fileName: for line in fileName: if line.startswith('org.eclipse.jdt.ui.importorder') \ or line.startswith('org.eclipse.jdt.ui.ondemandthreshold') \ or line.startswith('org.eclipse.jdt.ui.staticondemandthreshold'): print(line.strip(), file=out) importConfigFile = join(ideaProjectDirectory, 'EclipseImports.prefs') mx.update_file(importConfigFile, out.getvalue()) miscXml.open('component', attributes={'name' : 'EclipseCodeFormatterProjectSettings'}) miscXml.open('option', attributes={'name' : 'projectSpecificProfile'}) miscXml.open('ProjectSpecificProfile') miscXml.element('option', attributes={'name' : 'formatter', 'value' : 'ECLIPSE'}) custom_eclipse_exe = mx.get_env('ECLIPSE_EXE') if custom_eclipse_exe: custom_eclipse = dirname(custom_eclipse_exe) if mx.is_darwin(): custom_eclipse = join(dirname(custom_eclipse), 'Eclipse') if not exists(custom_eclipse_exe): mx.abort('Custom eclipse "{}" does not exist'.format(custom_eclipse_exe)) miscXml.element('option', attributes={'name' : 'eclipseVersion', 'value' : 'CUSTOM'}) miscXml.element('option', attributes={'name' : 'pathToEclipse', 'value' : custom_eclipse}) miscXml.element('option', attributes={'name' : 'pathToConfigFileJava', 'value' : '$PROJECT_DIR$/.idea/' + basename(formatterConfigFile)}) if importConfigFile: miscXml.element('option', attributes={'name' : 'importOrderConfigFilePath', 'value' : 
'$PROJECT_DIR$/.idea/' + basename(importConfigFile)}) miscXml.element('option', attributes={'name' : 'importOrderFromFile', 'value' : 'true'}) miscXml.close('ProjectSpecificProfile') miscXml.close('option') miscXml.close('component') miscXml.close('project') miscFile = join(ideaProjectDirectory, 'eclipseCodeFormatter.xml') mx.update_file(miscFile, miscXml.xml(indent=' ', newl='\n')) if java_modules: # Write codestyle settings mx.ensure_dir_exists(join(ideaProjectDirectory, 'codeStyles')) codeStyleConfigXml = mx.XMLDoc() codeStyleConfigXml.open('component', attributes={'name': 'ProjectCodeStyleConfiguration'}) codeStyleConfigXml.open('state') codeStyleConfigXml.element('option', attributes={'name': 'USE_PER_PROJECT_SETTINGS', 'value': 'true'}) codeStyleConfigXml.close('state') codeStyleConfigXml.close('component') codeStyleConfigFile = join(ideaProjectDirectory, 'codeStyles', 'codeStyleConfig.xml') mx.update_file(codeStyleConfigFile, codeStyleConfigXml.xml(indent=' ', newl='\n')) codeStyleProjectXml = mx.XMLDoc() codeStyleProjectXml.open('component', attributes={'name': 'ProjectCodeStyleConfiguration'}) codeStyleProjectXml.open('code_scheme', attributes={'name': 'Project', 'version': '173'}) codeStyleProjectXml.open('JavaCodeStyleSettings') # We cannot entirely disable wildcards import, but we can set the threshold to an insane number. 
codeStyleProjectXml.element('option', attributes={'name': 'CLASS_COUNT_TO_USE_IMPORT_ON_DEMAND', 'value': '65536'}) codeStyleProjectXml.element('option', attributes={'name': 'NAMES_COUNT_TO_USE_IMPORT_ON_DEMAND', 'value': '65536'}) codeStyleProjectXml.close('JavaCodeStyleSettings') codeStyleProjectXml.close('code_scheme') codeStyleProjectXml.close('component') codeStyleProjectFile = join(ideaProjectDirectory, 'codeStyles', 'Project.xml') mx.update_file(codeStyleProjectFile, codeStyleProjectXml.xml(indent=' ', newl='\n')) # Write checkstyle-idea.xml for the CheckStyle-IDEA checkstyleXml = mx.XMLDoc() checkstyleXml.open('project', attributes={'version': '4'}) checkstyleXml.open('component', attributes={'name': 'CheckStyle-IDEA'}) checkstyleXml.open('option', attributes={'name' : "configuration"}) checkstyleXml.open('map') if max_checkstyle_version: checkstyleXml.element('entry', attributes={'key': "checkstyle-version", 'value': str(max_checkstyle_version)}) # Initialize an entry for each style that is used checkstyleConfigs = set([]) for p in s.projects_recursive(): if not p.isJavaProject(): continue csConfig, checkstyleVersion, checkstyleProj = p.get_checkstyle_config() if not csConfig or csConfig in checkstyleConfigs: continue checkstyleConfigs.add(csConfig) checkstyleXml.element('entry', attributes={'key' : "location-" + str(len(checkstyleConfigs)), 'value': "PROJECT_RELATIVE:" + join(checkstyleProj.dir, ".checkstyle_checks.xml") + ":" + checkstyleProj.name}) checkstyleXml.close('map') checkstyleXml.close('option') checkstyleXml.close('component') checkstyleXml.close('project') checkstyleFile = join(ideaProjectDirectory, 'checkstyle-idea.xml') mx.update_file(checkstyleFile, checkstyleXml.xml(indent=' ', newl='\n')) # mx integration def antTargetName(dist): return 'archive_' + dist.name def artifactFileName(dist): return dist.name.replace('.', '_').replace('-', '_') + '.xml' validDistributions = [dist for dist in mx.sorted_dists() if not dist.suite.isBinarySuite() 
and not dist.isTARDistribution()] # 1) Make an ant file for archiving the distributions. antXml = mx.XMLDoc() antXml.open('project', attributes={'name': s.name, 'default': 'archive'}) for dist in validDistributions: antXml.open('target', attributes={'name': antTargetName(dist)}) antXml.open('exec', attributes={'executable': sys.executable}) antXml.element('arg', attributes={'value': join(mx._mx_home, 'mx.py')}) antXml.element('arg', attributes={'value': 'archive'}) antXml.element('arg', attributes={'value': '@' + dist.name}) antXml.close('exec') antXml.close('target') antXml.close('project') antFile = join(ideaProjectDirectory, 'ant-mx-archive.xml') mx.update_file(antFile, antXml.xml(indent=' ', newl='\n')) # 2) Tell IDEA that there is an ant-build. ant_mx_archive_xml = 'file://$PROJECT_DIR$/.idea/ant-mx-archive.xml' metaAntXml = mx.XMLDoc() metaAntXml.open('project', attributes={'version': '4'}) metaAntXml.open('component', attributes={'name': 'AntConfiguration'}) metaAntXml.open('buildFile', attributes={'url': ant_mx_archive_xml}) metaAntXml.close('buildFile') metaAntXml.close('component') metaAntXml.close('project') metaAntFile = join(ideaProjectDirectory, 'ant.xml') mx.update_file(metaAntFile, metaAntXml.xml(indent=' ', newl='\n')) # 3) Make an artifact for every distribution validArtifactNames = {artifactFileName(dist) for dist in validDistributions} artifactsDir = join(ideaProjectDirectory, 'artifacts') mx.ensure_dir_exists(artifactsDir) for fileName in os.listdir(artifactsDir): filePath = join(artifactsDir, fileName) if os.path.isfile(filePath) and fileName not in validArtifactNames: os.remove(filePath) for dist in validDistributions: artifactXML = mx.XMLDoc() artifactXML.open('component', attributes={'name': 'ArtifactManager'}) artifactXML.open('artifact', attributes={'build-on-make': 'true', 'name': dist.name}) artifactXML.open('output-path', data='$PROJECT_DIR$/mxbuild/artifacts/' + dist.name) artifactXML.close('output-path') 
artifactXML.open('properties', attributes={'id': 'ant-postprocessing'}) artifactXML.open('options', attributes={'enabled': 'true'}) artifactXML.open('file', data=ant_mx_archive_xml) artifactXML.close('file') artifactXML.open('target', data=antTargetName(dist)) artifactXML.close('target') artifactXML.close('options') artifactXML.close('properties') artifactXML.open('root', attributes={'id': 'root'}) for javaProject in [dep for dep in dist.archived_deps() if dep.isJavaProject()]: artifactXML.element('element', attributes={'id': 'module-output', 'name': javaProject.name}) for javaProject in [dep for dep in dist.deps if dep.isLibrary() or dep.isDistribution()]: artifactXML.element('element', attributes={'id': 'artifact', 'artifact-name': javaProject.name}) artifactXML.close('root') artifactXML.close('artifact') artifactXML.close('component') artifactFile = join(artifactsDir, artifactFileName(dist)) mx.update_file(artifactFile, artifactXML.xml(indent=' ', newl='\n')) def intellij_scm_name(vc_kind): if vc_kind == 'git': return 'Git' elif vc_kind == 'hg': return 'hg4idea' vcsXml = mx.XMLDoc() vcsXml.open('project', attributes={'version': '4'}) vcsXml.open('component', attributes={'name': 'VcsDirectoryMappings'}) suites_for_vcs = mx.suites() + ([mx._mx_suite] if mx_python_modules else []) sourceSuitesWithVCS = [vc_suite for vc_suite in suites_for_vcs if vc_suite.isSourceSuite() and vc_suite.vc is not None] uniqueSuitesVCS = {(vc_suite.vc_dir, vc_suite.vc.kind) for vc_suite in sourceSuitesWithVCS} for vcs_dir, kind in uniqueSuitesVCS: vcsXml.element('mapping', attributes={'directory': vcs_dir, 'vcs': intellij_scm_name(kind)}) vcsXml.close('component') vcsXml.close('project') vcsFile = join(ideaProjectDirectory, 'vcs.xml') mx.update_file(vcsFile, vcsXml.xml(indent=' ', newl='\n'))
def makegraaljdk(args):
    """make a JDK with Graal as the default top level JIT"""
    parser = ArgumentParser(prog='mx makegraaljdk')
    parser.add_argument('-f', '--force', action='store_true', help='overwrite existing GraalJDK')
    parser.add_argument('-a', '--archive', action='store', help='name of archive to create', metavar='<path>')
    parser.add_argument('dest', help='destination directory for GraalJDK', metavar='<path>')
    args = parser.parse_args(args)
    if isJDK8:
        dstJdk = os.path.abspath(args.dest)
        srcJdk = jdk.home
        if exists(dstJdk):
            if args.force:
                shutil.rmtree(dstJdk)
            else:
                mx.abort('Use --force to overwrite existing directory ' + dstJdk)
        mx.log('Creating {} from {}'.format(dstJdk, srcJdk))
        # Start from a full copy of the source JDK, then overlay the Graal jars.
        shutil.copytree(srcJdk, dstJdk)
        bootDir = mx.ensure_dir_exists(join(dstJdk, 'jre', 'lib', 'boot'))
        jvmciDir = join(dstJdk, 'jre', 'lib', 'jvmci')
        assert exists(jvmciDir), jvmciDir + ' does not exist'
        # The server VM library lives in a platform-dependent directory.
        if mx.get_os() == 'darwin' or mx.get_os() == 'windows':
            jvmlibDir = join(dstJdk, 'jre', 'lib', 'server')
        else:
            jvmlibDir = join(dstJdk, 'jre', 'lib', mx.get_arch(), 'server')
        jvmlib = join(jvmlibDir, mx.add_lib_prefix(mx.add_lib_suffix('jvm')))
        assert exists(jvmlib), jvmlib + ' does not exist'
        # Select Graal as the JVMCI compiler in the copied JDK.
        # NOTE: use print(..., file=...) consistently with the rest of this file;
        # the Python 2 `print >> fp` form is a syntax error under print_function.
        with open(join(jvmciDir, 'compiler-name'), 'w') as fp:
            print('graal', file=fp)
        vmName = 'Graal'
        mapFiles = set()
        # Copy the JVMCI class path jars into the new JDK, collecting any
        # proguard-style .map files and recording provenance in the release file.
        for e in _jvmci_classpath:
            src = basename(e.get_path())
            mx.log('Copying {} to {}'.format(e.get_path(), jvmciDir))
            candidate = e.get_path() + '.map'
            if exists(candidate):
                mapFiles.add(candidate)
            with open(join(dstJdk, 'release'), 'a') as fp:
                d = e.dist()
                s = d.suite
                print('{}={}'.format(d.name, s.vc.parent(s.dir)), file=fp)
                vmName = vmName + ':' + s.name + '_' + s.version()
            shutil.copyfile(e.get_path(), join(jvmciDir, src))
        # Copy the boot class path additions, same bookkeeping as above.
        for e in _bootclasspath_appends:
            src = basename(e.classpath_repr())
            mx.log('Copying {} to {}'.format(e.classpath_repr(), bootDir))
            candidate = e.classpath_repr() + '.map'
            if exists(candidate):
                mapFiles.add(candidate)
            with open(join(dstJdk, 'release'), 'a') as fp:
                s = e.suite
                print('{}={}'.format(e.name, s.vc.parent(s.dir)), file=fp)
            shutil.copyfile(e.classpath_repr(), join(bootDir, src))
        out = mx.LinesOutputCapture()
        mx.run([jdk.java, '-version'], err=out)
        found = False
        pattern = re.compile(r'(.* )(?:Server|Graal) VM \(build.*')
        for line in out.lines:
            m = pattern.match(line)
            if m:
                with open(join(jvmlibDir, 'vm.properties'), 'w') as fp:
                    # Modify VM name in `java -version` to be Graal along
                    # with a suffix denoting the commit of each Graal jar.
                    # For example:
                    # Java HotSpot(TM) 64-Bit Graal:compiler_88847fb25d1a62977a178331a5e78fa5f8fcbb1a (build 25.71-b01-internal-jvmci-0.34, mixed mode)
                    print('name=' + m.group(1) + vmName, file=fp)
                found = True
                break
        if not found:
            mx.abort('Could not find "{}" in output of `java -version`:\n{}'.format(pattern.pattern, os.linesep.join(out.lines)))
        exe = join(dstJdk, 'bin', mx.exe_suffix('java'))
        # Sanity check: bootstrap the JVMCI compiler in the new JDK, unstripping
        # any stack traces via the collected map files.
        with StdoutUnstripping(args=[], out=None, err=None, mapFiles=mapFiles) as u:
            mx.run([exe, '-XX:+BootstrapJVMCI', '-version'], out=u.out, err=u.err)
        if args.archive:
            mx.log('Archiving {}'.format(args.archive))
            create_archive(dstJdk, args.archive, basename(args.dest) + '/')
    else:
        mx.abort('Can only make GraalJDK for JDK 8 currently')
def build(self):
    """Write every launcher script declared by the subject and mark it executable."""
    mx.ensure_dir_exists(self.subject.get_output_root())
    for launcher_path, _, _ in self.subject.launchers():
        with open(launcher_path, "w") as launcher_file:
            launcher_file.write(self.contents(launcher_path))
        os.chmod(launcher_path, 0o755)
def jlink_new_jdk(jdk, dst_jdk_dir, module_dists, root_module_names=None, missing_export_target_action='create', with_source=lambda x: True):
    """
    Uses jlink from `jdk` to create a new JDK image in `dst_jdk_dir` with `module_dists` and
    their dependencies added to the JDK image, replacing any existing modules of the same name.

    :param JDKConfig jdk: source JDK
    :param str dst_jdk_dir: path to use for the jlink --output option
    :param list module_dists: list of distributions defining modules
    :param list root_module_names: list of strings naming the module root set for the new JDK image.
                     The named modules must either be in `module_dists` or in `jdk`. If None, then
                     the root set will be all the modules in `module_dists` and `jdk`.
    :param str missing_export_target_action: the action to perform for a qualified export target that
                     is not present in `module_dists` and does not have a hash stored in java.base.
                     The choices are:
                       "create" - an empty module is created
                        "error" - raise an error
                           None - do nothing
    :param lambda with_source: returns True if the sources of a module distribution must be included in the new JDK
    """
    assert callable(with_source)
    if jdk.javaCompliance < '9':
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' with jlink since it is not JDK 9 or later')
    # A developer build exposes java.base as an exploded directory; jlink needs jmods/jimage.
    exploded_java_base_module = join(jdk.home, 'modules', 'java.base')
    if exists(exploded_java_base_module):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since it appears to be a developer build with exploded modules')
    jimage = join(jdk.home, 'lib', 'modules')
    jmods_dir = join(jdk.home, 'jmods')
    if not isfile(jimage):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jimage + ' is missing or is not an ordinary file')
    if not isdir(jmods_dir):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jmods_dir + ' is missing or is not a directory')

    # Name -> descriptor for every module already in the source JDK.
    jdk_modules = {jmd.name : jmd for jmd in jdk.get_modules()}
    modules = [as_java_module(dist, jdk) for dist in module_dists]
    all_module_names = frozenset(list(jdk_modules.keys()) + [m.name for m in modules])

    # Read hashes stored in java.base (the only module in the JDK where hashes are stored)
    out = mx.LinesOutputCapture()
    mx.run([jdk.exe_path('jmod'), 'describe', jdk_modules['java.base'].get_jmod_path()], out=out)
    lines = out.lines
    hashes = {}
    for line in lines:
        if line.startswith('hashes'):
            # A hashes line has the form: "hashes <module> <algorithm> <value>"
            parts = line.split()
            assert len(parts) == 4, 'expected hashes line to have 4 fields, got {} fields: {}'.format(len(parts), line)
            _, module_name, algorithm, hash_value = parts
            hashes[module_name] = (algorithm, hash_value)

    build_dir = mx.ensure_dir_exists(join(dst_jdk_dir + ".build"))
    try:
        # Handle targets of qualified exports that are not present in `modules`
        target_requires = {}
        for jmd in modules:
            for targets in jmd.exports.values():
                for target in targets:
                    # A target is unresolved if it is neither an available module nor hashed in java.base.
                    if target not in all_module_names and target not in hashes:
                        target_requires.setdefault(target, set()).add(jmd.name)
        if target_requires and missing_export_target_action is not None:
            if missing_export_target_action == 'error':
                mx.abort('Target(s) of qualified exports cannot be resolved: ' + '.'.join(target_requires.keys()))
            assert missing_export_target_action == 'create', 'invalid value for missing_export_target_action: ' + str(missing_export_target_action)

            # Synthesize an empty module (only a module-info) for each unresolved export target.
            extra_modules = []
            for name, requires in target_requires.items():
                module_jar = join(build_dir, name + '.jar')
                jmd = JavaModuleDescriptor(name, {}, requires={module : [] for module in requires}, uses=set(), provides={}, jarpath=module_jar)
                extra_modules.append(jmd)
                module_build_dir = mx.ensure_dir_exists(join(build_dir, name))
                module_info_java = join(module_build_dir, 'module-info.java')
                module_info_class = join(module_build_dir, 'module-info.class')
                with open(module_info_java, 'w') as fp:
                    print(jmd.as_module_info(), file=fp)
                mx.run([jdk.javac, '-d', module_build_dir, \
                    '--limit-modules=java.base,' + ','.join(jmd.requires.keys()), \
                    '--module-path=' + os.pathsep.join((m.jarpath for m in modules)), \
                    module_info_java])
                with ZipFile(module_jar, 'w') as zf:
                    zf.write(module_info_class, basename(module_info_class))
                if exists(jmd.get_jmod_path()):
                    os.remove(jmd.get_jmod_path())
                # Derive the jmod executable path from javac's path (same bin directory).
                mx.run([jdk.javac.replace('javac', 'jmod'), 'create', '--class-path=' + module_build_dir, jmd.get_jmod_path()])

            modules.extend(extra_modules)
            all_module_names = frozenset(list(jdk_modules.keys()) + [m.name for m in modules])

        # Extract src.zip from source JDK
        jdk_src_zip = join(jdk.home, 'lib', 'src.zip')
        dst_src_zip_contents = {}
        if isfile(jdk_src_zip):
            mx.logv('[Extracting ' + jdk_src_zip + ']')
            with ZipFile(jdk_src_zip, 'r') as zf:
                for name in zf.namelist():
                    if not name.endswith('/'):
                        dst_src_zip_contents[name] = zf.read(name)
        else:
            mx.warn("'{}' does not exist or is not a file".format(jdk_src_zip))

        for jmd in modules:
            # Remove existing sources for all the modules that we include
            dst_src_zip_contents = {key : dst_src_zip_contents[key] for key in dst_src_zip_contents if not key.startswith(jmd.name)}

            if with_source(jmd.dist):
                # Add the sources that we can share.
                # Extract module sources
                jmd_src_zip = jmd.jarpath[0:-len('.jar')] + '.src.zip'
                if isfile(jmd_src_zip):
                    mx.logv('[Extracting ' + jmd_src_zip + ']')
                    with ZipFile(jmd_src_zip, 'r') as zf:
                        for name in zf.namelist():
                            if not name.endswith('/'):
                                dst_src_zip_contents[jmd.name + '/' + name] = zf.read(name)

                # Add module-info.java to sources
                dst_src_zip_contents[jmd.name + '/module-info.java'] = jmd.as_module_info(extras_as_comments=False)

        # Now build the new JDK image with jlink
        jlink = [jdk.javac.replace('javac', 'jlink')]

        if jdk_enables_jvmci_by_default(jdk):
            # On JDK 9+, +EnableJVMCI forces jdk.internal.vm.ci to be in the root set
            jlink.append('-J-XX:-EnableJVMCI')
        if root_module_names is not None:
            missing = frozenset(root_module_names) - all_module_names
            if missing:
                mx.abort('Invalid module(s): {}.\nAvailable modules: {}'.format(','.join(missing), ','.join(sorted(all_module_names))))
            jlink.append('--add-modules=' + ','.join(root_module_names))
        else:
            jlink.append('--add-modules=' + ','.join(sorted(all_module_names)))

        # Replaced/added modules' jmods come first so they shadow the JDK's own copies.
        module_path = jmods_dir
        if modules:
            module_path = os.pathsep.join((m.get_jmod_path(respect_stripping=True) for m in modules)) + os.pathsep + module_path
        jlink.append('--module-path=' + module_path)
        jlink.append('--output=' + dst_jdk_dir)

        # These options are inspired by how OpenJDK runs jlink to produce the final runtime image.
        jlink.extend(['-J-XX:+UseSerialGC', '-J-Xms32M', '-J-Xmx512M', '-J-XX:TieredStopAtLevel=1'])
        jlink.append('-J-Dlink.debug=true')
        jlink.append('--dedup-legal-notices=error-if-not-same-content')
        jlink.append('--keep-packaged-modules=' + join(dst_jdk_dir, 'jmods'))

        # TODO: investigate the options below used by OpenJDK to see if they should be used:
        # --release-info: this allow extra properties to be written to the <jdk>/release file
        # --order-resources: specifies order of resources in generated lib/modules file.
        #       This is apparently not so important if a CDS archive is available.
        # --generate-jli-classes: pre-generates a set of java.lang.invoke classes.
        #       See https://github.com/openjdk/jdk/blob/master/make/GenerateLinkOptData.gmk
        mx.logv('[Creating JDK image]')
        mx.run(jlink)

        # Write the merged src.zip (source JDK sources minus replaced modules, plus our modules' sources).
        dst_src_zip = join(dst_jdk_dir, 'lib', 'src.zip')
        mx.logv('[Creating ' + dst_src_zip + ']')
        with ZipFile(dst_src_zip, 'w', compression=ZIP_DEFLATED, allowZip64=True) as zf:
            for name, contents in sorted(dst_src_zip_contents.items()):
                zf.writestr(name, contents)

        # jlink does not copy static libraries, so do it manually.
        mx.logv('[Copying static libraries]')
        lib_prefix = mx.add_lib_prefix('')
        lib_suffix = '.lib' if mx.is_windows() else '.a'
        lib_directory = join(jdk.home, 'lib')
        dst_lib_directory = join(dst_jdk_dir, 'lib')
        for f in os.listdir(lib_directory):
            if f.startswith(lib_prefix) and f.endswith(lib_suffix):
                lib_path = join(lib_directory, f)
                if isfile(lib_path):
                    shutil.copy2(lib_path, dst_lib_directory)

        # Build the list of modules whose classes might have annotations
        # to be processed by native-image (GR-15192).
        with open(join(dst_jdk_dir, 'lib', 'native-image-modules.list'), 'w') as fp:
            print('# Modules whose classes might have annotations processed by native-image', file=fp)
            for m in modules:
                print(m.name, file=fp)

    finally:
        if not mx.get_opts().verbose:
            # Preserve build directory so that javac command can be re-executed
            # by cutting and pasting verbose output.
            shutil.rmtree(build_dir)

    # Create CDS archive (https://openjdk.java.net/jeps/341).
    out = mx.OutputCapture()
    mx.logv('[Creating CDS shared archive]')
    if mx.run([mx.exe_suffix(join(dst_jdk_dir, 'bin', 'java')), '-Xshare:dump', '-Xmx128M', '-Xms128M'], out=out, err=out, nonZeroIsFatal=False) != 0:
        mx.log(out.data)
        mx.abort('Error generating CDS shared archive')
def _netbeansinit_project(p, jdks=None, files=None, libFiles=None, dists=None):
    """
    Generates the NetBeans project files for the mx project `p`:
    `build.xml` (an Ant driver that delegates back to mx), `nbproject/project.xml`,
    `nbproject/project.properties` and `nbproject/cfg_hints.xml`.

    :param p: the mx project to generate NetBeans metadata for
    :param jdks: if not None, a set into which the JDK used by `p` is added
    :param files: if not None, a list into which the paths of all generated files are appended
    :param libFiles: if not None, a list into which the paths of library jars referenced by `p` are appended
    :param dists: distributions whose build should be chained to this project's build targets
    """
    dists = [] if dists is None else dists
    nb_dir = mx.ensure_dir_exists(join(p.dir))
    nbproject_dir = mx.ensure_dir_exists(join(nb_dir, 'nbproject'))
    jdk = mx.get_jdk(p.javaCompliance)
    assert jdk
    if jdks is not None:
        jdks.add(jdk)
    execDir = mx.primary_suite().dir

    # --- First generated file: build.xml -------------------------------------
    # An Ant script whose targets (clean, compile, run, debug, javadoc, ...)
    # shell back out to mx (sys.executable running this file) so NetBeans
    # actions use the mx build rather than Ant's own Java build.
    out = mx.XMLDoc()
    out.open('project', {'name' : p.name, 'default' : 'default', 'basedir' : '.'})
    out.element('description', data='Builds, tests, and runs the project ' + p.name + '.')
    out.element('available', {'file' : 'nbproject/build-impl.xml', 'property' : 'build.impl.exists'})
    out.element('import', {'file' : 'nbproject/build-impl.xml', 'optional' : 'true'})
    out.element('extension-point', {'name' : '-mx-init'})
    out.element('available', {'file' : 'nbproject/build-impl.xml', 'property' : 'mx.init.targets', 'value' : 'init'})
    out.element('property', {'name' : 'mx.init.targets', 'value' : ''})
    out.element('bindtargets', {'extensionPoint' : '-mx-init', 'targets' : '${mx.init.targets}'})

    # Detect missing classpath entries so dependent targets can be skipped.
    out.open('target', {'name' : '-post-init'})
    out.open('pathconvert', {'property' : 'comma.javac.classpath', 'pathsep' : ','})
    out.element('path', {'path' : '${javac.classpath}'})
    out.close('pathconvert')
    out.open('restrict', {'id' : 'missing.javac.classpath'})
    out.element('filelist', {'dir' : '${basedir}', 'files' : '${comma.javac.classpath}'})
    out.open('not')
    out.element('exists')
    out.close('not')
    out.close('restrict')
    out.element('property', {'name' : 'missing.javac.classpath', 'refid' : 'missing.javac.classpath'})
    out.open('condition', {'property' : 'no.dependencies', 'value' : 'true'})
    out.element('equals', {'arg1' : '${missing.javac.classpath}', 'arg2' : ''})
    out.close('condition')
    out.element('property', {'name' : 'no.dependencies', 'value' : 'false'})
    out.open('condition', {'property' : 'no.deps'})
    out.element('equals', {'arg1' : '${no.dependencies}', 'arg2' : 'true'})
    out.close('condition')
    out.close('target')

    # 'clean' target delegates to `mx clean --projects <p>`.
    out.open('target', {'name' : 'clean'})
    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true', 'dir' : execDir})
    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
    out.element('arg', {'value' : os.path.abspath(__file__)})
    out.element('arg', {'value' : 'clean'})
    out.element('arg', {'value' : '--projects'})
    out.element('arg', {'value' : p.name})
    out.close('exec')
    out.close('target')

    # 'compile' target delegates to `mx build --only <p>,<dists...>`.
    out.open('target', {'name' : 'compile'})
    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true', 'dir' : execDir})
    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
    out.element('arg', {'value' : os.path.abspath(__file__)})
    out.element('arg', {'value' : 'build'})
    dependsOn = p.name
    for d in dists:
        dependsOn = dependsOn + ',' + d.name
    out.element('arg', {'value' : '--only'})
    out.element('arg', {'value' : dependsOn})
    out.element('arg', {'value' : '--force-javac'})
    out.element('arg', {'value' : '--no-native'})
    out.element('arg', {'value' : '--no-daemon'})
    out.close('exec')
    out.close('target')

    # 'package' runs only when NetBeans generated build-impl.xml exists.
    out.open('target', {'name' : 'package', 'if' : 'build.impl.exists'})
    out.element('antcall', {'target': '-package', 'inheritall': 'true', 'inheritrefs': 'true'})
    out.close('target')

    # '-package' copies compile-on-save output and touches per-distribution
    # timestamp files, then spawns a background 'packagelater' build.
    out.open('target', {'name' : '-package', 'depends' : '-mx-init'})
    out.element('loadfile', {'srcFile' : join(p.suite.get_output_root(), 'netbeans.log'), 'property' : 'netbeans.log', 'failonerror' : 'false'})
    out.element('echo', {'message' : '...truncated...${line.separator}', 'output' : join(p.suite.get_output_root(), 'netbeans.log')})
    out.element('echo', {'message' : '${netbeans.log}'})
    for d in dists:
        if d.isDistribution():
            out.element('touch', {'file' : '${java.io.tmpdir}/' + d.name})
            out.element('echo', {'message' : d.name + ' set to now${line.separator}', 'append' : 'true', 'output' : join(p.suite.get_output_root(), 'netbeans.log')})
    out.open('copy', {'todir' : '${build.classes.dir}', 'overwrite' : 'true'})
    out.element('resources', {'refid' : 'changed.files'})
    out.close('copy')
    if len(p.annotation_processors()) > 0:
        out.open('copy', {'todir' : '${src.ap-source-output.dir}'})
        out.open('fileset', {'dir': '${cos.src.dir.internal}/../sources/'})
        out.element('include', {'name': '**/*.java'})
        out.close('fileset')
        out.close('copy')
    out.open('exec', {'executable' : '${ant.home}/bin/ant', 'spawn' : 'true'})
    out.element('arg', {'value' : '-f'})
    out.element('arg', {'value' : '${ant.file}'})
    out.element('arg', {'value' : 'packagelater'})
    out.close('exec')
    out.close('target')

    # Per-distribution targets: 'checkpackage-<d>' decides (via the timestamp
    # file) whether the distribution needs rebuilding; 'packagelater-<d>'
    # then re-runs `mx build` for it if so.
    for d in dists:
        if d.isDistribution():
            out.open('target', {'name' : 'checkpackage-' + d.name})
            out.open('tstamp')
            out.element('format', {'pattern' : 'S', 'unit' : 'millisecond', 'property' : 'at.' + d.name})
            out.close('tstamp')
            out.element('touch', {'file' : '${java.io.tmpdir}/' + d.name, 'millis' : '${at.' + d.name + '}0000'})
            out.element('echo', {'message' : d.name + ' touched to ${at.' + d.name + '}0000${line.separator}', 'append' : 'true', 'output' : join(p.suite.get_output_root(), 'netbeans.log')})
            out.element('sleep', {'seconds' : '3'})
            out.open('condition', {'property' : 'mx.' + d.name, 'value' : sys.executable})
            out.open('islastmodified', {'millis' : '${at.' + d.name + '}0000', 'mode' : 'equals'})
            out.element('file', {'file' : '${java.io.tmpdir}/' + d.name})
            out.close('islastmodified')
            out.close('condition')
            out.element('echo', {'message' : d.name + ' defined as ' + '${mx.' + d.name + '}${line.separator}', 'append' : 'true', 'output' : join(p.suite.get_output_root(), 'netbeans.log')})
            out.close('target')
            out.open('target', {'name' : 'packagelater-' + d.name, 'depends' : 'checkpackage-' + d.name, 'if' : 'mx.' + d.name})
            out.open('exec', {'executable' : '${mx.' + d.name + '}', 'failonerror' : 'true', 'dir' : execDir, 'output' : join(p.suite.get_output_root(), 'netbeans.log'), 'append' : 'true'})
            out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
            out.element('arg', {'value' : os.path.abspath(__file__)})
            out.element('arg', {'value' : 'build'})
            out.element('arg', {'value' : '-f'})
            out.element('arg', {'value' : '--only'})
            out.element('arg', {'value' : d.name})
            out.element('arg', {'value' : '--force-javac'})
            out.element('arg', {'value' : '--no-native'})
            out.element('arg', {'value' : '--no-daemon'})
            out.close('exec')
            out.close('target')
    dependsOn = ''
    sep = ''
    for d in dists:
        dependsOn = dependsOn + sep + 'packagelater-' + d.name
        sep = ','
    out.open('target', {'name' : 'packagelater', 'depends' : dependsOn})
    out.close('target')
    out.open('target', {'name' : 'jar', 'depends' : 'compile'})
    out.close('target')
    # 'test'/'test-single' both just run; 'run' delegates to `mx unittest`.
    out.element('target', {'name' : 'test', 'depends' : 'run'})
    out.element('target', {'name' : 'test-single', 'depends' : 'run'})
    out.open('target', {'name' : 'run'})
    out.element('property', {'name' : 'test.class', 'value' : p.name})
    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true', 'dir' : execDir})
    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
    out.element('arg', {'value' : os.path.abspath(__file__)})
    out.element('arg', {'value' : 'unittest'})
    out.element('arg', {'value' : '${test.class}'})
    out.close('exec')
    out.close('target')
    # 'debug' starts a JPDA listener in NetBeans then attaches mx to it.
    out.element('target', {'name' : 'debug-test', 'depends' : 'debug'})
    out.open('target', {'name' : 'debug', 'depends' : '-mx-init'})
    out.element('property', {'name' : 'test.class', 'value' : p.name})
    out.open('nbjpdastart', {'addressproperty' : 'jpda.address', 'name' : p.name})
    out.open('classpath')
    out.open('fileset', {'dir' : '..'})
    out.element('include', {'name' : '*/bin/'})
    out.close('fileset')
    out.close('classpath')
    out.open('sourcepath')
    out.element('pathelement', {'location' : 'src'})
    out.close('sourcepath')
    out.close('nbjpdastart')
    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true', 'dir' : execDir})
    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
    out.element('arg', {'value' : os.path.abspath(__file__)})
    out.element('arg', {'value' : '-d'})
    out.element('arg', {'value' : '--attach'})
    out.element('arg', {'value' : '${jpda.address}'})
    out.element('arg', {'value' : 'unittest'})
    out.element('arg', {'value' : '${test.class}'})
    out.close('exec')
    out.close('target')
    # 'javadoc' delegates to `mx javadoc` then opens the result in a browser.
    out.open('target', {'name' : 'javadoc'})
    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true', 'dir' : execDir})
    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.home})
    out.element('arg', {'value' : os.path.abspath(__file__)})
    out.element('arg', {'value' : 'javadoc'})
    out.element('arg', {'value' : '--projects'})
    out.element('arg', {'value' : p.name})
    out.element('arg', {'value' : '--force'})
    out.close('exec')
    out.element('nbbrowse', {'file' : 'javadoc/index.html'})
    out.close('target')
    out.close('project')
    mx.update_file(join(nb_dir, 'build.xml'), out.xml(indent='\t', newl='\n'))
    if files is not None:
        files.append(join(nb_dir, 'build.xml'))

    # --- Second generated file: nbproject/project.xml ------------------------
    # Declares the j2se project type, its source roots and references to
    # other NetBeans projects corresponding to mx project dependencies.
    out = mx.XMLDoc()
    out.open('project', {'xmlns' : 'http://www.netbeans.org/ns/project/1'})
    out.element('type', data='org.netbeans.modules.java.j2seproject')
    out.open('configuration')
    out.open('data', {'xmlns' : 'http://www.netbeans.org/ns/j2se-project/3'})
    out.element('name', data=p.name)
    out.element('explicit-platform', {'explicit-source-supported' : 'true'})
    out.open('source-roots')
    out.element('root', {'id' : 'src.dir'})
    if len(p.annotation_processors()) > 0:
        out.element('root', {'id' : 'src.ap-source-output.dir', 'name' : 'Generated Packages'})
    out.close('source-roots')
    out.open('test-roots')
    out.close('test-roots')
    out.close('data')
    firstDep = []

    def processDep(dep, edge):
        # Emits a <reference> for each project dependency; the <references>
        # container is opened lazily on the first such dependency.
        if dep is p:
            return
        if dep.isProject():
            n = dep.name.replace('.', '_')
            if not firstDep:
                out.open('references', {'xmlns' : 'http://www.netbeans.org/ns/ant-project-references/1'})
                firstDep.append(dep)
            out.open('reference')
            out.element('foreign-project', data=n)
            out.element('artifact-type', data='jar')
            out.element('script', data='build.xml')
            out.element('target', data='jar')
            out.element('clean-target', data='clean')
            out.element('id', data='jar')
            out.close('reference')
    #pylint: disable=too-many-function-args
    p.walk_deps(visit=processDep, ignoredEdges=[mx.DEP_EXCLUDED])
    if firstDep:
        out.close('references')
    out.close('configuration')
    out.close('project')
    mx.update_file(join(nbproject_dir, 'project.xml'), out.xml(indent=' ', newl='\n'))
    if files is not None:
        files.append(join(nbproject_dir, 'project.xml'))

    # --- Third generated file: nbproject/project.properties -------------------
    out = StringIO()
    jdkPlatform = 'JDK_' + str(jdk.version)

    annotationProcessorEnabled = "false"
    annotationProcessorSrcFolder = ""
    annotationProcessorSrcFolderRef = ""
    if len(p.annotation_processors()) > 0:
        annotationProcessorEnabled = "true"
        mx.ensure_dir_exists(p.source_gen_dir())
        annotationProcessorSrcFolder = os.path.relpath(p.source_gen_dir(), nb_dir)
        # Backslashes must be escaped in .properties values (Windows paths).
        annotationProcessorSrcFolder = annotationProcessorSrcFolder.replace('\\', '\\\\')
        annotationProcessorSrcFolderRef = "src.ap-source-output.dir=" + annotationProcessorSrcFolder

    # Where possible, symlink nbproject/build to mx's output dir so NetBeans
    # and mx share class files; otherwise point NetBeans at the output dir.
    canSymlink = not (mx.is_windows() or mx.is_cygwin()) and 'symlink' in dir(os)
    if canSymlink:
        nbBuildDir = join(nbproject_dir, 'build')
        apSourceOutRef = "annotation.processing.source.output=" + annotationProcessorSrcFolder
        if os.path.lexists(nbBuildDir):
            os.unlink(nbBuildDir)
        os.symlink(p.output_dir(), nbBuildDir)
    else:
        nbBuildDir = p.output_dir()
        apSourceOutRef = ""
    mx.ensure_dir_exists(p.output_dir())

    mx_ide_eclipse._copy_eclipse_settings(nb_dir, p)

    content = """
annotation.processing.enabled=""" + annotationProcessorEnabled + """
annotation.processing.enabled.in.editor=""" + annotationProcessorEnabled + """
""" + apSourceOutRef + """
annotation.processing.processors.list=
annotation.processing.run.all.processors=true
application.title=""" + p.name + """
application.vendor=mx
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.eclipseFormatterActiveProfile=
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.eclipseFormatterEnabled=true
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.eclipseFormatterLocation=
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.enableFormatAsSaveAction=true
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.linefeed=
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.preserveBreakPoints=true
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.SaveActionModifiedLinesOnly=false
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.showNotifications=false
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.sourcelevel=
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.useProjectPref=true
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.useProjectSettings=true
auxiliary.de-markiewb-netbeans-plugins-eclipse-formatter.eclipseFormatterActiveProfile=
auxiliary.org-netbeans-spi-editor-hints-projects.perProjectHintSettingsEnabled=true
auxiliary.org-netbeans-spi-editor-hints-projects.perProjectHintSettingsFile=nbproject/cfg_hints.xml
build.classes.dir=${build.dir}
build.classes.excludes=**/*.java,**/*.form
# This directory is removed when the project is cleaned:
build.dir=""" + nbBuildDir + """
$cos.update=package
$cos.update.resources=changed.files
compile.on.save=true
build.generated.sources.dir=${build.dir}/generated-sources
# Only compile against the classpath explicitly listed here:
build.sysclasspath=ignore
build.test.classes.dir=${build.dir}/test/classes
build.test.results.dir=${build.dir}/test/results
# Uncomment to specify the preferred debugger connection transport:
#debug.transport=dt_socket
debug.classpath=\\
    ${run.classpath}
debug.test.classpath=\\
    ${run.test.classpath}
# This directory is removed when the project is cleaned:
dist.dir=dist
dist.jar=${dist.dir}/""" + p.name + """.jar
dist.javadoc.dir=${dist.dir}/javadoc
endorsed.classpath=
excludes=
includes=**
jar.compress=false
java.main.action=test
# Space-separated list of extra javac options
javac.compilerargs=-XDignore.symbol.file
javac.deprecation=false
javac.source=""" + str(p.javaCompliance) + """
javac.target=""" + str(p.javaCompliance) + """
javac.test.classpath=\\
    ${javac.classpath}:\\
    ${build.classes.dir}
javadoc.additionalparam=
javadoc.author=false
javadoc.encoding=${source.encoding}
javadoc.noindex=false
javadoc.nonavbar=false
javadoc.notree=false
javadoc.private=false
javadoc.splitindex=true
javadoc.use=true
javadoc.version=false
javadoc.windowtitle=
manifest.file=manifest.mf
meta.inf.dir=${src.dir}/META-INF
mkdist.disabled=false
platforms.""" + jdkPlatform + """.home=""" + jdk.home + """
platform.active=""" + jdkPlatform + """
run.classpath=\\
    ${javac.classpath}:\\
    ${build.classes.dir}
# Space-separated list of JVM arguments used when running the project
# (you may also define separate properties like run-sys-prop.name=value instead of -Dname=value
# or test-sys-prop.name=value to set system properties for unit tests):
run.jvmargs=
run.test.classpath=\\
    ${javac.test.classpath}:\\
    ${build.test.classes.dir}
test.src.dir=./test
""" + annotationProcessorSrcFolderRef + """
source.encoding=UTF-8""".replace(':', os.pathsep).replace('/', os.sep)
    print(content, file=out)

    # Workaround for NetBeans "too clever" behavior. If you want to be
    # able to press F6 or Ctrl-F5 in NetBeans and run/debug unit tests
    # then the project must have its main.class property set to an
    # existing class with a properly defined main method. Until this
    # behavior is remedied, we specify a well known Truffle class
    # that will be on the class path for most Truffle projects.
    # This can be overridden by defining a netbeans.project.properties
    # attribute for a project in suite.py (see below).
    print("main.class=com.oracle.truffle.api.impl.Accessor", file=out)

    # Add extra properties specified in suite.py for this project
    if hasattr(p, 'netbeans.project.properties'):
        properties = getattr(p, 'netbeans.project.properties')
        for prop in [properties] if isinstance(properties, str) else properties:
            print(prop, file=out)

    # The first source dir becomes src.dir; the rest get src.<name>.dir keys.
    mainSrc = True
    for src in p.srcDirs:
        srcDir = mx.ensure_dir_exists(join(p.dir, src))
        ref = 'file.reference.' + p.name + '-' + src
        print(ref + '=' + os.path.relpath(srcDir, nb_dir), file=out)
        if mainSrc:
            print('src.dir=${' + ref + '}', file=out)
            mainSrc = False
        else:
            print('src.' + src + '.dir=${' + ref + '}', file=out)

    javacClasspath = []

    def newDepsCollector(into):
        # Returns a walk_deps visitor that collects compile-relevant deps.
        return lambda dep, edge: into.append(dep) if dep.isLibrary() or dep.isJdkLibrary() or dep.isProject() or dep.isClasspathDependency() else None

    deps = []
    p.walk_deps(visit=newDepsCollector(deps))
    annotationProcessorOnlyDeps = []
    if len(p.annotation_processors()) > 0:
        for apDep in p.annotation_processors():
            resolvedApDeps = []
            apDep.walk_deps(visit=newDepsCollector(resolvedApDeps))
            for resolvedApDep in resolvedApDeps:
                if not resolvedApDep in deps:
                    deps.append(resolvedApDep)
                    annotationProcessorOnlyDeps.append(resolvedApDep)

    annotationProcessorReferences = []

    # Emit file/project references for every dependency, partitioning them
    # into the javac classpath vs. the annotation processor path.
    for dep in deps:
        if dep == p:
            continue
        if dep.isLibrary() or dep.isJdkLibrary():
            if dep.isLibrary():
                path = dep.get_path(resolve=True)
                sourcePath = dep.get_source_path(resolve=True)
            else:
                path = dep.classpath_repr(jdk, resolve=True)
                sourcePath = dep.get_source_path(jdk)
            if path:
                if os.sep == '\\':
                    path = path.replace('\\', '\\\\')
                ref = 'file.reference.' + dep.name + '-bin'
                print(ref + '=' + path, file=out)
                if libFiles:
                    libFiles.append(path)
            if sourcePath:
                if os.sep == '\\':
                    sourcePath = sourcePath.replace('\\', '\\\\')
                print('source.reference.' + dep.name + '-bin=' + sourcePath, file=out)
        elif dep.isMavenProject():
            path = dep.get_path(resolve=False)
            if path:
                if os.sep == '\\':
                    path = path.replace('\\', '\\\\')
                ref = 'file.reference.' + dep.name + '-bin'
                print(ref + '=' + path, file=out)
        elif dep.isProject():
            n = dep.name.replace('.', '_')
            relDepPath = os.path.relpath(dep.dir, nb_dir).replace(os.sep, '/')
            if canSymlink:
                depBuildPath = join('nbproject', 'build')
            else:
                depBuildPath = 'dist/' + dep.name + '.jar'
            ref = 'reference.' + n + '.jar'
            print('project.' + n + '=' + relDepPath, file=out)
            print(ref + '=${project.' + n + '}/' + depBuildPath, file=out)
        elif dep.isJreLibrary():
            continue
        elif dep.isClasspathDependency():
            extra = [di for di in dep.deps if di not in deps]
            if dep.isDistribution() and dep.deps and not extra:
                # ignore distribution classpath dependencies that only contain other explicit depedencies
                continue
            path = dep.classpath_repr(resolve=True)
            sourcePath = dep.get_source_path(jdk) if hasattr(dep, 'get_source_path') else None
            if path:
                if os.sep == '\\':
                    path = path.replace('\\', '\\\\')
                ref = 'file.reference.' + dep.name + '-bin'
                print(ref + '=' + path, file=out)
                if libFiles:
                    libFiles.append(path)
                if sourcePath:
                    if os.sep == '\\':
                        sourcePath = sourcePath.replace('\\', '\\\\')
                    print('source.reference.' + dep.name + '-bin=' + sourcePath, file=out)
        if not dep in annotationProcessorOnlyDeps:
            javacClasspath.append('${' + ref + '}')
        else:
            annotationProcessorReferences.append('${' + ref + '}')

    print('javac.classpath=\\\n    ' + (os.pathsep + '\\\n    ').join(javacClasspath), file=out)
    print('javac.processorpath=' + (os.pathsep + '\\\n    ').join(['${javac.classpath}'] + annotationProcessorReferences), file=out)
    print('javac.test.processorpath=' + (os.pathsep + '\\\n    ').join(['${javac.test.classpath}'] + annotationProcessorReferences), file=out)

    mx.update_file(join(nbproject_dir, 'project.properties'), out.getvalue())
    out.close()
    if files is not None:
        files.append(join(nbproject_dir, 'project.properties'))

    # --- Fourth generated file: nbproject/cfg_hints.xml -----------------------
    for source in p.suite.netbeans_settings_sources().get('cfg_hints.xml'):
        with open(source) as fp:
            content = fp.read()
    mx.update_file(join(nbproject_dir, 'cfg_hints.xml'), content)
    if files is not None:
        files.append(join(p.dir, 'nbproject', 'cfg_hints.xml'))
def _eclipseinit(self, files=None, libFiles=None):
    """
    Generates an Eclipse CDT project (.project, .cproject, .settings) for each
    HotSpot build configuration (every jvmVariant x debugLevel combination).

    :param files: if not None, a list into which the paths of generated files are appended
    :param libFiles: unused; present to match the _eclipseinit hook signature
    """
    # Top-level HotSpot source tree entries exposed as Eclipse linked resources.
    roots = [
        'ASSEMBLY_EXCEPTION',
        'LICENSE',
        'README',
        'THIRD_PARTY_README',
        'agent',
        'make',
        'src',
        'test'
    ]
    for jvmVariant in _jdkJvmVariants:
        for debugLevel in _jdkDebugLevels:
            name = jvmVariant + '-' + debugLevel
            eclProjectDir = join(self.dir, 'eclipse', name)
            mx.ensure_dir_exists(eclProjectDir)

            out = mx.XMLDoc()
            out.open('projectDescription')
            out.element('name', data='hotspot:' + name)
            out.element('comment', data='')
            out.element('projects', data='')
            out.open('buildSpec')
            out.open('buildCommand')
            out.element('name', data='org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder')
            out.element('triggers', data='full,incremental')
            out.element('arguments', data='')
            out.close('buildCommand')
            out.close('buildSpec')
            out.open('natures')
            out.element('nature', data='org.eclipse.cdt.core.cnature')
            out.element('nature', data='org.eclipse.cdt.core.ccnature')
            out.element('nature', data='org.eclipse.cdt.managedbuilder.core.managedBuildNature')
            out.element('nature', data='org.eclipse.cdt.managedbuilder.core.ScannerConfigNature')
            out.close('natures')

            if roots:
                out.open('linkedResources')
                for r in roots:
                    f = join(_suite.dir, r)
                    out.open('link')
                    out.element('name', data=r)
                    # Eclipse resource type: 2 = folder, 1 = file.
                    out.element('type', data='2' if isdir(f) else '1')
                    out.element('locationURI', data=mx.get_eclipse_project_rel_locationURI(f, eclProjectDir))
                    out.close('link')

                # Link the per-configuration generated sources directory.
                out.open('link')
                out.element('name', data='gensrc')
                out.element('type', data='2')
                generated = join(_get_hotspot_build_dir(jvmVariant, debugLevel), 'gensrc')
                out.element('locationURI', data=mx.get_eclipse_project_rel_locationURI(generated, eclProjectDir))
                out.close('link')

                out.close('linkedResources')
            out.close('projectDescription')
            projectFile = join(eclProjectDir, '.project')
            mx.update_file(projectFile, out.xml(indent='\t', newl='\n'))
            if files:
                files.append(projectFile)

            # The .cproject is copied verbatim from a template.
            cprojectTemplate = join(self.dir, 'templates', 'eclipse', 'cproject')
            cprojectFile = join(eclProjectDir, '.cproject')
            with open(cprojectTemplate) as f:
                content = f.read()
            mx.update_file(cprojectFile, content)
            if files:
                files.append(cprojectFile)

            # Generate each .settings file from its source with a header
            # identifying it as generated.
            settingsDir = join(eclProjectDir, ".settings")
            mx.ensure_dir_exists(settingsDir)
            for settingsName, source in self._get_eclipse_settings_sources().items():
                out = StringIO()
                print('# GENERATED -- DO NOT EDIT', file=out)
                print('# Source:', source, file=out)
                with open(source) as f:
                    print(f.read(), file=out)
                content = out.getvalue()
                mx.update_file(join(settingsDir, settingsName), content)
                if files:
                    files.append(join(settingsDir, settingsName))
def jlink_new_jdk(jdk, dst_jdk_dir, module_dists, ignore_dists,
                  root_module_names=None,
                  missing_export_target_action='create',
                  with_source=lambda x: True,
                  vendor_info=None,
                  dedup_legal_notices=True,
                  use_upgrade_module_path=False):
    """
    Uses jlink from `jdk` to create a new JDK image in `dst_jdk_dir` with `module_dists` and
    their dependencies added to the JDK image, replacing any existing modules of the same name.

    :param JDKConfig jdk: source JDK
    :param str dst_jdk_dir: path to use for the jlink --output option
    :param list module_dists: list of distributions defining modules
    :param list ignore_dists: list of distributions that should be ignored for missing_export_target_action
    :param list root_module_names: list of strings naming the module root set for the new JDK image.
                     The named modules must either be in `module_dists` or in `jdk`. If None, then
                     the root set will be all the modules in ``module_dists` and `jdk`.
    :param str missing_export_target_action: the action to perform for a qualified export target that
                     is not present in `module_dists` and does not have a hash stored in java.base.
                     The choices are:
                       "create" - an empty module is created
                        "error" - raise an error
                           None - do nothing
    :param lambda with_source: returns True if the sources of a module distribution must be included in the new JDK
    :param dict vendor_info: values for the jlink vendor options added by JDK-8232080
    :param bool use_upgrade_module_path: if True, then instead of linking `module_dists` into the image, resolve
                     them via --upgrade-module-path at image runtime
    :return bool: False if use_upgrade_module_path == True and the existing image is up to date otherwise True
    """
    assert callable(with_source)

    if jdk.javaCompliance < '9':
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' with jlink since it is not JDK 9 or later')

    # Sanity-check the source JDK layout: jlink needs a jimage plus jmods,
    # which developer builds with exploded modules do not have.
    exploded_java_base_module = join(jdk.home, 'modules', 'java.base')
    if exists(exploded_java_base_module):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since it appears to be a developer build with exploded modules')
    jimage = join(jdk.home, 'lib', 'modules')
    jmods_dir = join(jdk.home, 'jmods')
    if not isfile(jimage):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jimage + ' is missing or is not an ordinary file')
    if not isdir(jmods_dir):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jmods_dir + ' is missing or is not a directory')

    # Exclude jdk.aot due to GR-10545 and JDK-8255616
    jdk_modules = {jmd.name: jmd for jmd in jdk.get_modules() if jmd.name != 'jdk.aot'}
    modules = [as_java_module(dist, jdk) for dist in module_dists]
    module_names = frozenset((m.name for m in modules))
    all_module_names = frozenset(list(jdk_modules.keys())) | module_names

    # Read hashes stored in java.base (the only module in the JDK where hashes are stored)
    hashes = _read_java_base_hashes(jdk)

    # Scratch directory for synthetic modules and the patched java.base;
    # removed in the finally block below unless running verbosely.
    build_dir = mx.ensure_dir_exists(join(dst_jdk_dir + ".build"))

    # Directory under dst_jdk_dir for artifacts related to use_upgrade_module_path
    upgrade_dir = join(dst_jdk_dir, 'upgrade_modules_support')

    # Map from JavaModuleDescriptors to post-jlink jar location.
    synthetic_modules = OrderedDict()
    try:
        ignore_module_names = set(mx_javamodules.get_module_name(mx.dependency(ignore_dist)) for ignore_dist in ignore_dists)
        # Synthesize modules for targets of qualified exports that are not present in `modules`.
        # Without this, runtime module resolution will fail due to missing modules.
        target_requires = {}
        for jmd in modules:
            for targets in jmd.exports.values():
                for target in targets:
                    if target not in all_module_names and target not in ignore_module_names and target not in hashes:
                        target_requires.setdefault(target, set()).add(jmd.name)
        if target_requires and missing_export_target_action is not None:
            if missing_export_target_action == 'error':
                mx.abort('Target(s) of qualified exports cannot be resolved: ' + '.'.join(target_requires.keys()))
            assert missing_export_target_action == 'create', 'invalid value for missing_export_target_action: ' + str(missing_export_target_action)

            # For each missing target, build an empty module (just a
            # module-info) that requires its exporters, package it as a jar
            # and, unless resolving at runtime, as a jmod.
            for name, requires in sorted(target_requires.items()):
                module_jar = join(build_dir, name + '.jar')
                jmd = JavaModuleDescriptor(name, {}, requires={module: [] for module in requires}, uses=set(), provides={}, jarpath=module_jar)
                module_build_dir = mx.ensure_dir_exists(join(build_dir, name))
                module_info = jmd.as_module_info()
                module_info_java = join(module_build_dir, 'module-info.java')
                module_info_class = join(module_build_dir, 'module-info.class')
                dst_module_jar = join(upgrade_dir, name + '.jar')
                synthetic_modules[jmd] = dst_module_jar
                if use_upgrade_module_path and exists(dst_module_jar):
                    # Reuse the previously built synthetic module if its
                    # module descriptor is unchanged.
                    with ZipFile(dst_module_jar, 'r') as zf:
                        previous_module_info = zf.read('module-info.java').decode()
                    if previous_module_info == module_info:
                        mx.logv('[Reusing synthetic module {}]'.format(name))
                        os.rename(dst_module_jar, module_jar)
                        continue
                    mx.logv('[Rebuilding synthetic module {} as module descriptor changed]'.format(name))

                with open(module_info_java, 'w') as fp:
                    fp.write(module_info)
                mx.run([jdk.javac, '-d', module_build_dir,
                        '--limit-modules=java.base,' + ','.join(jmd.requires.keys()),
                        '--module-path=' + os.pathsep.join((m.jarpath for m in modules)),
                        module_info_java])
                with ZipFile(module_jar, 'w') as zf:
                    zf.write(module_info_java, 'module-info.java')
                    zf.write(module_info_class, 'module-info.class')
                if exists(jmd.get_jmod_path()):
                    os.remove(jmd.get_jmod_path())
                if not use_upgrade_module_path:
                    mx.run([jdk.javac.replace('javac', 'jmod'), 'create', '--class-path=' + module_build_dir, jmd.get_jmod_path()])

            modules.extend(synthetic_modules.keys())
            module_names = frozenset((m.name for m in modules))
            all_module_names = frozenset(list(jdk_modules.keys())) | module_names

        # Edit lib/security/default.policy in java.base
        patched_java_base = _patch_default_security_policy(build_dir, jmods_dir, dst_jdk_dir)

        # Now build the new JDK image with jlink
        jlink = [jdk.javac.replace('javac', 'jlink')]

        if jdk_enables_jvmci_by_default(jdk):
            # On JDK 9+, +EnableJVMCI forces jdk.internal.vm.ci to be in the root set
            jlink += ['-J-XX:-EnableJVMCI', '-J-XX:-UseJVMCICompiler']

        jlink.append('--add-modules=' + ','.join(_get_image_root_modules(root_module_names, module_names, jdk_modules.keys(), use_upgrade_module_path)))

        # The patched java.base must precede the stock jmods on the module path
        # so that it shadows the original.
        module_path = patched_java_base + os.pathsep + jmods_dir
        if modules and not use_upgrade_module_path:
            module_path = os.pathsep.join((m.get_jmod_path(respect_stripping=True) for m in modules)) + os.pathsep + module_path
        jlink.append('--module-path=' + module_path)
        jlink.append('--output=' + dst_jdk_dir)

        # These options are derived from how OpenJDK runs jlink to produce the final runtime image.
        jlink.extend(['-J-XX:+UseSerialGC', '-J-Xms32M', '-J-Xmx512M', '-J-XX:TieredStopAtLevel=1'])
        jlink.append('-J-Dlink.debug=true')
        if dedup_legal_notices:
            jlink.append('--dedup-legal-notices=error-if-not-same-content')
        jlink.append('--keep-packaged-modules=' + join(dst_jdk_dir, 'jmods'))

        vm_options_path = join(upgrade_dir, 'vm_options')
        vm_options = _get_image_vm_options(jdk, use_upgrade_module_path, modules, synthetic_modules)
        if vm_options:
            jlink.append('--add-options=' + ' '.join(vm_options))

        if jdk_has_new_jlink_options(jdk) and vendor_info is not None:
            for name, value in vendor_info.items():
                jlink.append('--' + name + '=' + value)

        release_file = join(jdk.home, 'release')
        if isfile(release_file):
            jlink.append('--release-info=' + release_file)

        if exists(dst_jdk_dir):
            if use_upgrade_module_path and _vm_options_match(vm_options, vm_options_path):
                # Existing image was produced with identical VM options; skip re-linking.
                mx.logv('[Existing JDK image {} is up to date]'.format(dst_jdk_dir))
                return False
            mx.rmtree(dst_jdk_dir)

        # TODO: investigate the options below used by OpenJDK to see if they should be used:
        # --order-resources: specifies order of resources in generated lib/modules file.
        #       This is apparently not so important if a CDS archive is available.
        # --generate-jli-classes: pre-generates a set of java.lang.invoke classes.
        #       See https://github.com/openjdk/jdk/blob/master/make/GenerateLinkOptData.gmk
        mx.logv('[Creating JDK image in {}]'.format(dst_jdk_dir))
        mx.run(jlink)

        if use_upgrade_module_path:
            # Move synthetic upgrade modules into final location
            for jmd, jarpath in synthetic_modules.items():
                mx.ensure_dir_exists(dirname(jarpath))
                os.rename(jmd.jarpath, jarpath)
            # Persist VM options cooked into image to be able to skip a subsequent
            # jlink execution if the options do not change.
            with open(vm_options_path, 'w') as fp:
                fp.write(os.linesep.join(vm_options))

        # Create src.zip in new JDK image
        _copy_src_zip(jdk.home, dst_jdk_dir, modules, lambda jmd: not use_upgrade_module_path and with_source(jmd.dist))

        mx.logv('[Copying static libraries]')
        lib_directory = join(jdk.home, 'lib', 'static')
        if exists(lib_directory):
            dst_lib_directory = join(dst_jdk_dir, 'lib', 'static')
            try:
                mx.copytree(lib_directory, dst_lib_directory)
            except shutil.Error as e:
                # On AArch64, there can be a problem in the copystat part
                # of copytree which occurs after file and directory copying
                # has successfully completed. Since the metadata doesn't
                # matter in this case, just ensure that the content was copied.
                for root, _, lib_files in os.walk(lib_directory):
                    relative_root = os.path.relpath(root, dst_lib_directory)
                    for lib in lib_files:
                        src_lib_path = join(root, lib)
                        dst_lib_path = join(dst_lib_directory, relative_root, lib)
                        if not exists(dst_lib_path):
                            mx.abort('Error copying static libraries: {} missing in {}{}Original copytree error: {}'.format(
                                join(relative_root, lib), dst_lib_directory, os.linesep, e))
                        src_lib_hash = mx.sha1OfFile(src_lib_path)
                        dst_lib_hash = mx.sha1OfFile(dst_lib_path)
                        if src_lib_hash != dst_lib_hash:
                            mx.abort('Error copying static libraries: {} (hash={}) and {} (hash={}) differ{}Original copytree error: {}'.format(
                                src_lib_path, src_lib_hash,
                                dst_lib_path, dst_lib_hash,
                                os.linesep, e))
        # Allow older JDK versions to work
        else:
            lib_prefix = mx.add_lib_prefix('')
            lib_suffix = mx.add_static_lib_suffix('')
            lib_directory = join(jdk.home, 'lib')
            dst_lib_directory = join(dst_jdk_dir, 'lib')
            for f in os.listdir(lib_directory):
                if f.startswith(lib_prefix) and f.endswith(lib_suffix):
                    lib_path = join(lib_directory, f)
                    if isfile(lib_path):
                        shutil.copy2(lib_path, dst_lib_directory)
    finally:
        if not mx.get_opts().verbose:
            # Preserve build directory so that javac command can be re-executed
            # by cutting and pasting verbose output.
            shutil.rmtree(build_dir)

    if not use_upgrade_module_path:
        # Create CDS archive (https://openjdk.java.net/jeps/341).
        out = mx.OutputCapture()
        mx.logv('[Creating CDS shared archive]')
        if mx.run([mx.exe_suffix(join(dst_jdk_dir, 'bin', 'java')), '-Xshare:dump', '-Xmx128M', '-Xms128M'], out=out, err=out, nonZeroIsFatal=False) != 0:
            mx.log(out.data)
            mx.abort('Error generating CDS shared archive')
    else:
        # -Xshare is incompatible with --upgrade-module-path
        pass
    return True
def build(self):
    """Compile every program source of this project into the project output directory.

    For each source file under ``getSourceDir()`` this produces, when a rebuild
    is needed (source newer than output, or output missing):

    * ``<name>.wasm`` — the binary module (emcc for ``.c`` sources, wat2wasm
      for ``.wat`` sources),
    * ``<name>.js`` and ``<name>.init`` — Emscripten JS glue plus extracted
      initialization info (``.c`` sources only),
    * ``<name>.result`` / ``<name>.opts`` — copied next to the output if such
      files exist next to the source,
    * ``<name>.wat`` — a text disassembly (or a copy of the ``.wat`` source),
    * a native executable under ``NATIVE_BENCH_DIR`` (``.c`` sources only),

    and finally writes a ``wasm_test_index`` file per subdirectory listing the
    program basenames. Aborts via ``mx.abort`` if a required toolchain
    (EMCC_DIR, gcc, WABT_DIR) is unavailable or any compilation step fails.
    """
    source_dir = self.subject.getSourceDir()
    output_dir = self.subject.getOutputDir()

    # Validate the toolchains up front before touching any outputs.
    if not emcc_dir:
        mx.abort("No EMCC_DIR specified - the source programs will not be compiled to .wasm.")
    emcc_cmd = os.path.join(emcc_dir, "emcc")
    gcc_cmd = os.path.join(gcc_dir, "gcc")
    if mx.run([emcc_cmd, "-v"], nonZeroIsFatal=False) != 0:
        mx.abort("Could not check the emcc version.")
    if mx.run([gcc_cmd, "--version"], nonZeroIsFatal=False) != 0:
        mx.abort("Could not check the gcc version.")
    if not wabt_dir:
        mx.abort("Set WABT_DIR if you want the binary to include .wat files.")
    mx.log("Building files from the source dir: " + source_dir)

    cc_flags = ["-O3", "-g2"]
    include_flags = []
    disable_test_api_flags = ["-DDISABLE_TEST_API"]
    # NOTE(review): this method reads both self.subject and self.project —
    # presumably aliases for the same project object; confirm in the mx base class.
    if hasattr(self.project, "includeset"):
        include_flags = ["-I", os.path.join(_suite.dir, "includes", self.project.includeset)]
    emcc_flags = cc_flags
    if self.project.isBenchmarkProject():
        # Benchmarks must export their entry points. emcc wants the list with
        # double quotes, hence the single-quote replacement on the repr.
        emcc_flags = emcc_flags + ["-s", "EXPORTED_FUNCTIONS=" + str(benchmark_methods).replace("'", "\"")]

    # subdirectory (relative to the source dir) -> basenames built in it.
    # Fix: defaultdict(list) instead of the equivalent defaultdict(lambda: []).
    subdir_program_names = defaultdict(list)

    for root, filename in self.subject.getProgramSources():
        subdir = os.path.relpath(root, self.subject.getSourceDir())
        mx.ensure_dir_exists(os.path.join(output_dir, subdir))

        basename = remove_extension(filename)
        source_path = os.path.join(root, filename)
        output_wasm_path = os.path.join(output_dir, subdir, basename + ".wasm")
        timestampedSource = mx.TimeStampFile(source_path)
        timestampedOutput = mx.TimeStampFile(output_wasm_path)
        mustRebuild = timestampedSource.isNewerThan(timestampedOutput) or not timestampedOutput.exists()

        # Step 1: build the .wasm binary.
        if mustRebuild:
            if filename.endswith(".c"):
                # Step 1a: compile with the JS file, and store as files for running Node, if necessary.
                output_js_path = os.path.join(output_dir, subdir, basename + ".js")
                build_cmd_line = [emcc_cmd] + emcc_flags + disable_test_api_flags + [source_path, "-o", output_js_path] + include_flags
                if mx.run(build_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not build the JS output of " + filename + " with emcc.")
                if self.subject.isBenchmarkProject():
                    node_dir = os.path.join(output_dir, subdir, NODE_BENCH_DIR)
                    mx.ensure_dir_exists(node_dir)
                    shutil.copyfile(output_js_path, os.path.join(node_dir, basename + ".js"))
                    shutil.copyfile(output_wasm_path, os.path.join(node_dir, basename + ".wasm"))
                # Step 1b: extract the relevant information out of the JS file, and record it into an initialization file.
                init_info = self.extractInitialization(output_js_path)
                with open(os.path.join(output_dir, subdir, basename + ".init"), "w") as f:
                    f.write(init_info)
                # Step 1c: compile to just a .wasm file, to avoid name mangling.
                build_cmd_line = [emcc_cmd] + emcc_flags + ["-s", "ERROR_ON_UNDEFINED_SYMBOLS=0"] + [source_path, "-o", output_wasm_path] + include_flags
                if mx.run(build_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not build the wasm-only output of " + filename + " with emcc.")
            elif filename.endswith(".wat"):
                # Step 1: compile the .wat file to .wasm.
                wat2wasm_cmd = os.path.join(wabt_dir, "wat2wasm")
                build_cmd_line = [wat2wasm_cmd, "-o", output_wasm_path, source_path]
                if mx.run(build_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not translate " + filename + " to binary format.")
        else:
            mx.logv("skipping, file is up-to-date: " + source_path)

        # Step 2: copy the result file if it exists.
        result_path = os.path.join(root, basename + ".result")
        if os.path.isfile(result_path):
            result_output_path = os.path.join(output_dir, subdir, basename + ".result")
            shutil.copyfile(result_path, result_output_path)

        # Step 3: copy the opts file if it exists.
        opts_path = os.path.join(root, basename + ".opts")
        if os.path.isfile(opts_path):
            opts_output_path = os.path.join(output_dir, subdir, basename + ".opts")
            shutil.copyfile(opts_path, opts_output_path)

        output_wat_path = os.path.join(output_dir, subdir, basename + ".wat")
        if mustRebuild:
            if filename.endswith(".c"):
                # Step 4: produce the .wat files, for easier debugging.
                wasm2wat_cmd = os.path.join(wabt_dir, "wasm2wat")
                if mx.run([wasm2wat_cmd, "-o", output_wat_path, output_wasm_path], nonZeroIsFatal=False) != 0:
                    mx.abort("Could not compile .wat file for " + filename)
            elif filename.endswith(".wat"):
                # Step 4: copy the .wat file, for easier debugging.
                wat_path = os.path.join(root, basename + ".wat")
                shutil.copyfile(wat_path, output_wat_path)

        # Step 5: if this is a benchmark project, create native binaries too.
        if mustRebuild:
            mx.ensure_dir_exists(os.path.join(output_dir, subdir, NATIVE_BENCH_DIR))
            if filename.endswith(".c"):
                output_path = os.path.join(output_dir, subdir, NATIVE_BENCH_DIR, mx.exe_suffix(basename))
                link_flags = ["-lm"]
                gcc_cmd_line = [gcc_cmd] + cc_flags + disable_test_api_flags + [source_path, "-o", output_path] + include_flags + link_flags
                if mx.run(gcc_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not build the native binary of " + filename + ".")
                # Owner-only rwx; the binary is executed later by the benchmark harness.
                os.chmod(output_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
            elif filename.endswith(".wat"):
                mx.warn("The .wat files are not translated to native binaries: " + filename)

        # Remember the source name.
        subdir_program_names[subdir].append(basename)

    # One index file per subdirectory, listing the programs built there.
    for subdir in subdir_program_names:
        with open(os.path.join(output_dir, subdir, "wasm_test_index"), "w") as f:
            for name in subdir_program_names[subdir]:
                f.write(name)
                f.write("\n")
def jlink_new_jdk(jdk, dst_jdk_dir, module_dists, root_module_names=None, missing_export_target_action='create', with_source=lambda x: True, vendor_info=None, dedup_legal_notices=True):
    """
    Uses jlink from `jdk` to create a new JDK image in `dst_jdk_dir` with
    `module_dists` and their dependencies added to the JDK image, replacing any
    existing modules of the same name.

    :param JDKConfig jdk: source JDK
    :param str dst_jdk_dir: path to use for the jlink --output option
    :param list module_dists: list of distributions defining modules
    :param list root_module_names: list of strings naming the module root set for the new JDK image.
                     The named modules must either be in `module_dists` or in `jdk`. If None, then
                     the root set will be all the modules in `module_dists` and `jdk`.
    :param str missing_export_target_action: the action to perform for a qualified export target
                     that is not present in `module_dists` and does not have a hash stored in
                     java.base. The choices are:
                       "create" - an empty module is created
                        "error" - raise an error
                           None - do nothing
    :param lambda with_source: returns True if the sources of a module distribution must be included in the new JDK
    :param dict vendor_info: values for the jlink vendor options added by JDK-8232080
    :param bool dedup_legal_notices: pass --dedup-legal-notices=error-if-not-same-content to jlink
    """
    assert callable(with_source)

    # Sanity checks: jlink needs a modular (JDK 9+) JDK that is a regular build
    # with a jimage (lib/modules) and packaged modules (jmods/), not a developer
    # build with exploded modules.
    if jdk.javaCompliance < '9':
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' with jlink since it is not JDK 9 or later')
    exploded_java_base_module = join(jdk.home, 'modules', 'java.base')
    if exists(exploded_java_base_module):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since it appears to be a developer build with exploded modules')
    jimage = join(jdk.home, 'lib', 'modules')
    jmods_dir = join(jdk.home, 'jmods')
    if not isfile(jimage):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jimage + ' is missing or is not an ordinary file')
    if not isdir(jmods_dir):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jmods_dir + ' is missing or is not a directory')

    # Exclude jdk.aot due to GR-10545 and JDK-8255616
    jdk_modules = {jmd.name: jmd for jmd in jdk.get_modules() if jmd.name != 'jdk.aot'}
    modules = [as_java_module(dist, jdk) for dist in module_dists]
    all_module_names = frozenset(list(jdk_modules.keys()) + [m.name for m in modules])

    # Read hashes stored in java.base (the only module in the JDK where hashes are stored).
    # Relevant `jmod describe` output lines look like: "hashes <module> <algorithm> <hash>".
    out = mx.LinesOutputCapture()
    mx.run([jdk.exe_path('jmod'), 'describe', jdk_modules['java.base'].get_jmod_path()], out=out)
    lines = out.lines
    hashes = {}
    for line in lines:
        if line.startswith('hashes'):
            parts = line.split()
            assert len(parts) == 4, 'expected hashes line to have 4 fields, got {} fields: {}'.format(len(parts), line)
            _, module_name, algorithm, hash_value = parts
            hashes[module_name] = (algorithm, hash_value)

    # Scratch directory next to the destination; removed in the finally block
    # below unless running verbose (so jlink/javac commands can be re-run by hand).
    build_dir = mx.ensure_dir_exists(join(dst_jdk_dir + ".build"))
    try:
        # Handle targets of qualified exports that are not present in `modules`:
        # collect, for each absent (and unhashed) export target, the set of
        # modules that export to it.
        target_requires = {}
        for jmd in modules:
            for targets in jmd.exports.values():
                for target in targets:
                    if target not in all_module_names and target not in hashes:
                        target_requires.setdefault(target, set()).add(jmd.name)
        if target_requires and missing_export_target_action is not None:
            if missing_export_target_action == 'error':
                # NOTE(review): '.'.join concatenates multiple module names with dots,
                # which reads as one garbled module name; ', ' would be clearer — verify
                # before changing the message.
                mx.abort('Target(s) of qualified exports cannot be resolved: ' + '.'.join(target_requires.keys()))
            assert missing_export_target_action == 'create', 'invalid value for missing_export_target_action: ' + str(missing_export_target_action)

            # Synthesize an empty module (just a module-info) for each missing
            # target, compile it, and package it as both a jar and a jmod.
            extra_modules = []
            for name, requires in target_requires.items():
                module_jar = join(build_dir, name + '.jar')
                jmd = JavaModuleDescriptor(name, {}, requires={module: [] for module in requires}, uses=set(), provides={}, jarpath=module_jar)
                extra_modules.append(jmd)
                module_build_dir = mx.ensure_dir_exists(join(build_dir, name))
                module_info_java = join(module_build_dir, 'module-info.java')
                module_info_class = join(module_build_dir, 'module-info.class')
                with open(module_info_java, 'w') as fp:
                    print(jmd.as_module_info(), file=fp)
                mx.run([jdk.javac, '-d', module_build_dir,
                        '--limit-modules=java.base,' + ','.join(jmd.requires.keys()),
                        '--module-path=' + os.pathsep.join((m.jarpath for m in modules)),
                        module_info_java])
                with ZipFile(module_jar, 'w') as zf:
                    zf.write(module_info_class, basename(module_info_class))
                if exists(jmd.get_jmod_path()):
                    os.remove(jmd.get_jmod_path())
                # Derive the jmod tool path from the javac path by substitution.
                mx.run([jdk.javac.replace('javac', 'jmod'), 'create', '--class-path=' + module_build_dir, jmd.get_jmod_path()])

            modules.extend(extra_modules)
            all_module_names = frozenset(list(jdk_modules.keys()) + [m.name for m in modules])

        # Extract src.zip from source JDK
        jdk_src_zip = join(jdk.home, 'lib', 'src.zip')
        dst_src_zip_contents = {}
        if isfile(jdk_src_zip):
            mx.logv('[Extracting ' + jdk_src_zip + ']')
            with ZipFile(jdk_src_zip, 'r') as zf:
                for name in zf.namelist():
                    if not name.endswith('/'):
                        dst_src_zip_contents[name] = zf.read(name)
        else:
            mx.warn("'{}' does not exist or is not a file".format(jdk_src_zip))

        # Edit lib/security/default.policy in java.base. A jmod file is a 4-byte
        # 'JM' magic header followed by a zip archive; copy the header, then
        # rewrite the zip with the policy member patched.
        patched_java_base = join(build_dir, 'java.base.jmod')
        with open(join(jmods_dir, 'java.base.jmod'), 'rb') as src_f, open(patched_java_base, 'wb') as dst_f:
            jmod_header = src_f.read(4)
            if len(jmod_header) != 4 or jmod_header != b'JM\x01\x00':
                # NOTE(review): mx.abort raises itself, so this `raise` is never
                # reached with a value; harmless but redundant — confirm before cleanup.
                raise mx.abort("Unexpected jmod header: " + b2a_hex(jmod_header).decode('ascii'))
            dst_f.write(jmod_header)
            policy_result = 'not found'
            with ZipFile(src_f, 'r') as src_zip, ZipFile(dst_f, 'w', src_zip.compression) as dst_zip:
                for i in src_zip.infolist():
                    if i.filename[-1] == '/':
                        continue
                    src_member = src_zip.read(i)
                    if i.filename == 'lib/security/default.policy':
                        policy_result = 'unmodified'
                        # Append AllPermission grants for the Graal/Truffle modules
                        # unless an earlier patch already added them.
                        if 'grant codeBase "jrt:/com.oracle.graal.graal_enterprise"'.encode('utf-8') not in src_member:
                            policy_result = 'modified'
                            src_member += """
grant codeBase "jrt:/com.oracle.graal.graal_enterprise" {
    permission java.security.AllPermission;
};
""".encode('utf-8')
                        if 'grant codeBase "jrt:/org.graalvm.truffle"'.encode('utf-8') not in src_member:
                            policy_result = 'modified'
                            src_member += """
grant codeBase "jrt:/org.graalvm.truffle" {
    permission java.security.AllPermission;
};

grant codeBase "jrt:/org.graalvm.sdk" {
    permission java.security.AllPermission;
};

grant codeBase "jrt:/org.graalvm.locator" {
  permission java.io.FilePermission "<<ALL FILES>>", "read";
  permission java.util.PropertyPermission "*", "read,write";
  permission java.lang.RuntimePermission "createClassLoader";
  permission java.lang.RuntimePermission "getClassLoader";
  permission java.lang.RuntimePermission "getenv.*";
};

grant codeBase "file:${java.home}/languages/-" {
    permission java.security.AllPermission;
};
""".encode('utf-8')
                    dst_zip.writestr(i, src_member)
            if policy_result == 'not found':
                raise mx.abort("Couldn't find `lib/security/default.policy` in " + join(jmods_dir, 'java.base.jmod'))

        for jmd in modules:
            # Remove existing sources for all the modules that we include
            dst_src_zip_contents = {key: dst_src_zip_contents[key] for key in dst_src_zip_contents if not key.startswith(jmd.name)}

            if with_source(jmd.dist):
                # Add the sources that we can share.
                # Extract module sources
                jmd_src_zip = jmd.jarpath[0:-len('.jar')] + '.src.zip'
                if isfile(jmd_src_zip):
                    mx.logv('[Extracting ' + jmd_src_zip + ']')
                    with ZipFile(jmd_src_zip, 'r') as zf:
                        for name in zf.namelist():
                            if not name.endswith('/'):
                                dst_src_zip_contents[jmd.name + '/' + name] = zf.read(name)

                # Add module-info.java to sources
                dst_src_zip_contents[jmd.name + '/module-info.java'] = jmd.as_module_info(extras_as_comments=False)

        # Now build the new JDK image with jlink
        jlink = [jdk.javac.replace('javac', 'jlink')]

        if jdk_enables_jvmci_by_default(jdk):
            # On JDK 9+, +EnableJVMCI forces jdk.internal.vm.ci to be in the root set
            jlink += ['-J-XX:-EnableJVMCI', '-J-XX:-UseJVMCICompiler']
        if root_module_names is not None:
            missing = frozenset(root_module_names) - all_module_names
            if missing:
                mx.abort('Invalid module(s): {}.\nAvailable modules: {}'.format(','.join(missing), ','.join(sorted(all_module_names))))
            jlink.append('--add-modules=' + ','.join(root_module_names))
        else:
            jlink.append('--add-modules=' + ','.join(sorted(all_module_names)))

        # Our module jmods come first on the module path so they shadow any
        # same-named modules in the base JDK; the patched java.base precedes jmods_dir.
        module_path = patched_java_base + os.pathsep + jmods_dir
        if modules:
            module_path = os.pathsep.join((m.get_jmod_path(respect_stripping=True) for m in modules)) + os.pathsep + module_path
        jlink.append('--module-path=' + module_path)
        jlink.append('--output=' + dst_jdk_dir)

        # These options are derived from how OpenJDK runs jlink to produce the final runtime image.
        jlink.extend(['-J-XX:+UseSerialGC', '-J-Xms32M', '-J-Xmx512M', '-J-XX:TieredStopAtLevel=1'])
        jlink.append('-J-Dlink.debug=true')
        if dedup_legal_notices:
            jlink.append('--dedup-legal-notices=error-if-not-same-content')
        jlink.append('--keep-packaged-modules=' + join(dst_jdk_dir, 'jmods'))

        if jdk_has_new_jlink_options(jdk):
            if jdk_omits_warning_for_jlink_set_ThreadPriorityPolicy(jdk):
                thread_priority_policy_option = ' -XX:ThreadPriorityPolicy=1'
            else:
                mx.logv('[Creating JDK without -XX:ThreadPriorityPolicy=1]')
                thread_priority_policy_option = ''

            if jdk_supports_enablejvmciproduct(jdk):
                if any((m.name == 'jdk.internal.vm.compiler' for m in modules)):
                    jlink.append('--add-options=-XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCIProduct -XX:-UnlockExperimentalVMOptions' + thread_priority_policy_option)
                else:
                    # Don't default to using JVMCI as JIT unless Graal is being updated in the image.
                    # This avoids unexpected issues with using the out-of-date Graal compiler in
                    # the JDK itself.
                    jlink.append('--add-options=-XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCIProduct -XX:-UseJVMCICompiler -XX:-UnlockExperimentalVMOptions' + thread_priority_policy_option)
            else:
                mx.logv('[Creating JDK without -XX:+EnableJVMCIProduct]')
                if thread_priority_policy_option:
                    jlink.append('--add-options=' + thread_priority_policy_option.strip())

        if vendor_info is not None:
            for name, value in vendor_info.items():
                jlink.append('--' + name + '=' + value)

        release_file = join(jdk.home, 'release')
        if isfile(release_file):
            jlink.append('--release-info=' + release_file)

        # TODO: investigate the options below used by OpenJDK to see if they should be used:
        # --order-resources: specifies order of resources in generated lib/modules file.
        #       This is apparently not so important if a CDS archive is available.
        # --generate-jli-classes: pre-generates a set of java.lang.invoke classes.
        #       See https://github.com/openjdk/jdk/blob/master/make/GenerateLinkOptData.gmk
        mx.logv('[Creating JDK image in {}]'.format(dst_jdk_dir))
        mx.run(jlink)

        # Write the merged src.zip (JDK sources minus replaced modules, plus ours).
        dst_src_zip = join(dst_jdk_dir, 'lib', 'src.zip')
        mx.logv('[Creating ' + dst_src_zip + ']')
        with ZipFile(dst_src_zip, 'w', compression=ZIP_DEFLATED, allowZip64=True) as zf:
            for name, contents in sorted(dst_src_zip_contents.items()):
                zf.writestr(name, contents)

        mx.logv('[Copying static libraries]')
        lib_directory = join(jdk.home, 'lib', 'static')
        if exists(lib_directory):
            dst_lib_directory = join(dst_jdk_dir, 'lib', 'static')
            try:
                mx.copytree(lib_directory, dst_lib_directory)
            except shutil.Error as e:
                # On AArch64, there can be a problem in the copystat part
                # of copytree which occurs after file and directory copying
                # has successfully completed. Since the metadata doesn't
                # matter in this case, just ensure that the content was copied.
                for root, _, lib_files in os.walk(lib_directory):
                    relative_root = os.path.relpath(root, dst_lib_directory)
                    for lib in lib_files:
                        src_lib_path = join(root, lib)
                        dst_lib_path = join(dst_lib_directory, relative_root, lib)
                        if not exists(dst_lib_path):
                            mx.abort('Error copying static libraries: {} missing in {}{}Original copytree error: {}'.format(
                                join(relative_root, lib), dst_lib_directory, os.linesep, e))
                        src_lib_hash = mx.sha1OfFile(src_lib_path)
                        dst_lib_hash = mx.sha1OfFile(dst_lib_path)
                        if src_lib_hash != dst_lib_hash:
                            mx.abort('Error copying static libraries: {} (hash={}) and {} (hash={}) differ{}Original copytree error: {}'.format(
                                src_lib_path, src_lib_hash, dst_lib_path, dst_lib_hash, os.linesep, e))
        # Allow older JDK versions to work
        else:
            lib_prefix = mx.add_lib_prefix('')
            lib_suffix = mx.add_static_lib_suffix('')
            lib_directory = join(jdk.home, 'lib')
            dst_lib_directory = join(dst_jdk_dir, 'lib')
            for f in os.listdir(lib_directory):
                if f.startswith(lib_prefix) and f.endswith(lib_suffix):
                    lib_path = join(lib_directory, f)
                    if isfile(lib_path):
                        shutil.copy2(lib_path, dst_lib_directory)
    finally:
        if not mx.get_opts().verbose:
            # Preserve build directory so that javac command can be re-executed
            # by cutting and pasting verbose output.
            shutil.rmtree(build_dir)

    # Create CDS archive (https://openjdk.java.net/jeps/341).
    out = mx.OutputCapture()
    mx.logv('[Creating CDS shared archive]')
    if mx.run([mx.exe_suffix(join(dst_jdk_dir, 'bin', 'java')), '-Xshare:dump', '-Xmx128M', '-Xms128M'], out=out, err=out, nonZeroIsFatal=False) != 0:
        mx.log(out.data)
        mx.abort('Error generating CDS shared archive')
def _sigtest_check(checktype, args, suite=None, projects=None):
    """Run sigtest against Java projects with API.

    Compares each eligible project's current API (its classpath) against the
    recorded ``snapshot.sigtest`` file in the project directory, writing a
    JUnit-style ``sigtest-junit.xml`` report into the project's output root.

    :param str checktype: which changes to check; anything other than 'all'
                          adds sigtest's -b (binary-compatibility-only) flag
    :param args: parsed command arguments; only ``args.human`` is read (adds -H)
    :param suite: unused here; kept for interface compatibility with callers
    :param projects: unused here; kept for interface compatibility with callers
    :return: 1 if there are no projects to test, otherwise 0; aborts via
             mx.abort when a signature change is detected
    """
    nonTestProjects = [p for p in mx.projects() if _should_test_project(p)]
    if not nonTestProjects:
        return 1
    javaCompliance = max([p.javaCompliance for p in nonTestProjects])

    # Minimal stream sink used to capture the sigtest process output.
    class OutputCapture:
        def __init__(self):
            self.data = ""

        def __call__(self, data):
            self.data += data

    failed = None
    for p in nonTestProjects:
        sigtestlib = p.suite.getMxCompatibility().get_sigtest_jar()
        sigtestResults = os.path.join(p.dir, 'snapshot.sigtest')
        if not os.path.exists(sigtestResults):
            # Projects without a recorded snapshot are silently skipped.
            continue
        jdk = mx.get_jdk(javaCompliance)
        cmd = [
            '-cp', mx._cygpathU2W(sigtestlib), 'com.sun.tdk.signaturetest.SignatureTest',
            '-BootCP', '-Static', '-Mode', 'bin',
            '-FileName', sigtestResults,
            '-ClassPath', mx.classpath(p, jdk=jdk),
        ]
        if args.human:
            cmd.append('-H')
        if checktype != 'all':
            cmd.append('-b')
        for pkg in mx._find_packages(p):
            # Fix: extend in place rather than rebuilding the list per package.
            cmd.extend(['-PackageWithoutSubpackages', pkg])
        out = OutputCapture()
        print('Checking ' + checktype + ' signature changes against ' + sigtestResults)
        # Reuse the jdk resolved above instead of a second mx.get_jdk call.
        exitcode = mx.run_java(cmd, nonZeroIsFatal=False, jdk=jdk, out=out, err=out)

        mx.ensure_dir_exists(p.get_output_root())
        with open(os.path.join(p.get_output_root(), 'sigtest-junit.xml'), 'w') as f:
            f.write('<?xml version="1.0" encoding="UTF-8" ?>\n')
            f.write('<testsuite tests="1" name="' + p.name + '.sigtest.' + checktype + '">\n')
            f.write('<testcase classname="' + p.name + '" name="sigtest.' + checktype + '">\n')
            # Exit code 95 is treated as the passing result; anything else is a failure.
            if exitcode != 95:
                print(out.data)
                failed = sigtestResults
                f.write('<failure type="SignatureCheck"><![CDATA[\n')
                f.write(out.data)
                f.write(']]></failure>')
            else:
                f.write('<system-err><![CDATA[\n')
                f.write(out.data)
                f.write(']]></system-err>')
            f.write('</testcase>\n')
            f.write('</testsuite>\n')
    if failed:
        print('\nThe signature check detected changes in the API by comparing it with previous signature files.')
        print('To fix this restore the original API or regenerate the signature files with:')
        print('mx sigtest --generate')
        mx.abort('Signature error in ' + failed)
    else:
        print('OK.')
    return 0
def testdownstream(suite, repoUrls, relTargetSuiteDir, mxCommands, branch=None):
    """
    Tests a downstream repo against the current working directory state of `suite`.

    :param mx.Suite suite: the suite to test against the downstream repo
    :param list repoUrls: URLs of downstream repos to clone, the first of which is the repo being tested
    :param str relTargetSuiteDir: directory of the downstream suite to test relative to the top level
           directory of the downstream repo being tested
    :param list mxCommands: argument lists for the mx commands run in downstream suite being tested
    :param str branch: name of branch to look for in downstream repo(s)
    """
    assert len(repoUrls) > 0
    workDir = join(suite.get_output_root(), 'testdownstream')

    # A mirror of each suites in the same repo as `suite` is created with symlinks
    rel_mirror = os.path.relpath(suite.dir, mx.SuiteModel.siblings_dir(suite.dir))
    in_subdir = os.sep in rel_mirror
    suites_in_repo = [suite]
    if in_subdir:
        # Multi-suite repo: scan sibling directories for other suites, reusing
        # already-loaded suite objects when mx knows about them.
        base = os.path.dirname(suite.dir)
        for e in os.listdir(base):
            candidate = join(base, e)
            if candidate != suite.dir:
                mxDir = mx._is_suite_dir(candidate)
                if mxDir:
                    matches = [s for s in mx.suites() if s.dir == candidate]
                    if len(matches) == 0:
                        suites_in_repo.append(mx.SourceSuite(mxDir, primary=False, load=False))
                    else:
                        suites_in_repo.append(matches[0])

    for suite_in_repo in suites_in_repo:
        rel_mirror = os.path.relpath(suite_in_repo.dir, mx.SuiteModel.siblings_dir(suite_in_repo.dir))
        mirror = join(workDir, rel_mirror)
        # Rebuild the mirror from scratch on every run.
        if exists(mirror):
            shutil.rmtree(mirror)
        mx.ensure_dir_exists(mirror)
        for f in os.listdir(suite_in_repo.dir):
            subDir = join(suite_in_repo.dir, f)
            # Skip the suite's own output directory to avoid mirroring build artifacts.
            if subDir == suite_in_repo.get_output_root():
                continue
            src = join(suite_in_repo.dir, f)
            dst = join(mirror, f)
            mx.logv('[Creating symlink from {} to {}]'.format(dst, src))
            # Relative symlinks keep the mirror relocatable.
            relsrc = os.path.relpath(src, os.path.dirname(dst))
            os.symlink(relsrc, dst)

    targetDir = None
    for repoUrl in repoUrls:
        # Deduce a target name from the target URL
        url = urlparse(repoUrl)
        targetName = url.path
        if targetName.rfind('/') != -1:
            targetName = targetName[targetName.rfind('/') + 1:]
        if targetName.endswith('.git'):
            targetName = targetName[0:-len('.git')]
        repoWorkDir = join(workDir, targetName)
        git = mx.GitConfig()
        if exists(repoWorkDir):
            git.pull(repoWorkDir)
        else:
            git.clone(repoUrl, repoWorkDir)

        # See if there's a matching (non-master) branch and use it if there is.
        # NOTE(review): once `branch` is resolved from the primary suite's HEAD on the
        # first iteration, the same branch name is tried in every subsequent repo —
        # presumably intentional so all downstream repos track the same branch; verify.
        if not branch:
            branch = git.git_command(suite.dir, ['rev-parse', '--abbrev-ref', 'HEAD']).strip()
        if branch != 'master':
            # Best-effort checkout: repos without the branch stay on their default.
            git.git_command(repoWorkDir, ['checkout', branch], abortOnError=False)
        # The first repo in repoUrls is the one under test.
        if not targetDir:
            targetDir = repoWorkDir

    assert not isabs(relTargetSuiteDir)
    targetSuiteDir = join(targetDir, relTargetSuiteDir)
    assert targetSuiteDir.startswith(targetDir)
    # NOTE(review): `mirror` here is the value left over from the last iteration of the
    # mirroring loop above — when `suite` is the mx suite itself this is assumed to be
    # the mirrored mx checkout containing mx.py; confirm for multi-suite mx repos.
    mxpy = None if suite != mx._mx_suite else join(mirror, 'mx.py')
    for command in mxCommands:
        mx.logv('[running "mx ' + ' '.join(command) + '" in ' + targetSuiteDir + ']')
        mx.run_mx(command, targetSuiteDir, mxpy=mxpy)