def _runmake(args):
    """run the JDK make process

    To build hotspot and import it into the JDK: "mx make hotspot import-hotspot"
    {0}"""
    jdkBuildDir = _get_jdk_build_dir()
    if not exists(jdkBuildDir):
        # JDK9 must be bootstrapped with a JDK8
        compliance = mx.JavaCompliance('8')
        jdk8 = mx.get_jdk(compliance.exactMatch, versionDescription=compliance.value)
        # Fix: '--with-jvm-features=graal' was passed twice (duplicate list
        # element at the start and end of the configure arguments); pass it once.
        cmd = ['sh', 'configure',
               '--with-debug-level=' + _vm.debugLevel,
               '--with-native-debug-symbols=external',
               '--disable-precompiled-headers',
               '--with-jvm-features=graal',
               '--with-jvm-variants=' + _vm.jvmVariant,
               '--disable-warnings-as-errors',
               '--with-boot-jdk=' + jdk8.home]
        mx.run(cmd, cwd=_jdkSourceRoot)
    cmd = [mx.gmake_cmd(), 'CONF=' + _vm.debugLevel]
    if mx.get_opts().verbose:
        cmd.append('LOG=debug')
    cmd.extend(args)

    if mx.get_opts().use_jdk_image and 'images' not in args:
        cmd.append('images')

    # Echo the command once when not verbose so the build invocation is visible
    if not mx.get_opts().verbose:
        mx.log('--------------- make execution ----------------------')
        mx.log('Working directory: ' + _jdkSourceRoot)
        mx.log('Command line: ' + ' '.join(cmd))
        mx.log('-----------------------------------------------------')

    mx.run(cmd, cwd=_jdkSourceRoot)
def graalvm_components(opt_limit_to_suite=False):
    """
    :rtype: list[GraalVmComponent]
    """
    components = _graalvm_components.values()
    suites = mx.get_opts().specific_suites
    # Only filter when the caller opted in AND the user restricted suites.
    if opt_limit_to_suite and suites:
        return [component for component in components if component.suite.name in suites]
    return components
def getJavaVm(self, bmSuiteArgs):
    """Resolve the Java VM and VM config for this benchmark run, applying defaults."""
    jvm = self.jvm(bmSuiteArgs)
    jvmConfig = self.jvmConfig(bmSuiteArgs)
    if jvm is None:
        # Honour the deprecated --vm option before falling back to 'server'.
        deprecated_vm = mx.get_opts().vm
        if deprecated_vm is not None:
            mx.log("Defaulting --jvm to the deprecated --vm value. Please use --jvm.")
            jvm = deprecated_vm
        else:
            mx.log("Defaulting the JVM to 'server'.")
            jvm = "server"
    if jvmConfig is None:
        mx.log("Defaulting --jvm-config to 'default'. Consider adding --jvm-config.")
        jvmConfig = "default"
    return get_java_vm(jvm, jvmConfig)
def ruby_command(args):
    """runs Ruby"""
    java_home = os.getenv('JAVA_HOME', '/usr')
    java = os.getenv('JAVACMD', java_home + '/bin/java')
    argv0 = java

    vmArgs, rubyArgs, user_classpath, print_command = extractArguments(args)
    entries = mx.classpath(['TRUFFLE_API', 'RUBY']).split(':')
    truffle_api = entries[0]
    assert os.path.basename(truffle_api) == "truffle-api.jar"
    # Give precedence to graal classpath and VM options
    classpath = user_classpath + entries[1:]
    launcher = [
        # '-Xss2048k',
        '-Xbootclasspath/a:' + truffle_api,
        '-cp', ':'.join(classpath),
        'org.jruby.truffle.Main'
    ]
    allArgs = vmArgs + launcher + ['-X+T'] + rubyArgs

    env = setup_jruby_home()
    if print_command:
        if mx.get_opts().verbose:
            log('Environment variables:')
            for name in sorted(env.keys()):
                log(name + '=' + env[name])
        log(java + ' ' + ' '.join(pipes.quote(arg) for arg in allArgs))
    # Replace the current process with the JVM running JRuby+Truffle.
    return os.execve(java, [argv0] + allArgs, env)
def native_image_context(common_args=None, hosted_assertions=True, debug_gr_8964=False, native_image_cmd=''):
    # Generator that yields a `native_image(args, ...)` function for building images
    # and shuts the image-build server down on exit.
    # NOTE(review): presumably wrapped by @contextlib.contextmanager at the
    # definition site (it yields once inside try/finally) — confirm.
    common_args = [] if common_args is None else common_args
    base_args = ['-H:+EnforceMaxRuntimeCompileMethods']
    base_args += ['-H:Path=' + svmbuild_dir()]
    if debug_gr_8964:
        base_args += ['-Ddebug_gr_8964=true']
    if mx.get_opts().verbose:
        base_args += ['--verbose']
    if mx.get_opts().very_verbose:
        base_args += ['--verbose-server']
    if hosted_assertions:
        # Function attribute acts as a shared default for assertion flags.
        base_args += native_image_context.hosted_assertions
    if native_image_cmd:
        # An explicitly given launcher must exist.
        if not exists(native_image_cmd):
            mx.abort('Given native_image_cmd does not exist')
    else:
        native_image_cmd = native_image_path(suite_native_image_root())

    if exists(native_image_cmd):
        mx.log('Use ' + native_image_cmd + ' for remaining image builds')
        def _native_image(args, **kwargs):
            mx.run([native_image_cmd] + args, **kwargs)
    else:
        # Fall back to driving native-image on the JVM.
        _native_image = native_image_on_jvm

    def query_native_image(all_args, option):
        # Dry-run the build and scrape the effective value of `option`
        # (e.g. '-H:Path=') from the echoed command line.
        out = mx.LinesOutputCapture()
        _native_image(['--dry-run'] + all_args, out=out)
        for line in out.lines:
            _, sep, after = line.partition(option)
            if sep:
                return after.split(' ')[0].rstrip()
        return None

    def native_image_func(args, debug_gr_8964=False, **kwargs):
        # Build an image and return the path where it was written.
        all_args = base_args + common_args + args
        path = query_native_image(all_args, '-H:Path=')
        name = query_native_image(all_args, '-H:Name=')
        image = join(path, name)
        _native_image(all_args, **kwargs)
        return image
    try:
        if exists(native_image_cmd):
            # Start from a clean server state before the first build.
            _native_image(['--server-wipe'])
        yield native_image_func
    finally:
        if exists(native_image_cmd):
            _native_image(['--server-shutdown'])
def clean(self, forBuild=False):
    """Run `mvn clean` and delete the newest output jar; no-op when cleaning for a build."""
    if forBuild:
        return
    maven_repo_arg, env = mavenSetup()
    quiet_flags = ['-q'] if not mx.get_opts().verbose else []
    mx.run_maven(quiet_flags + [maven_repo_arg, 'clean'], nonZeroIsFatal=False, cwd=_suite.dir, env=env)
    jar = self.newestOutput()
    if jar.exists():
        os.remove(jar.path)
def _runmake(args):
    """run the JDK make process

    To build hotspot and import it into the JDK: "mx make hotspot import-hotspot"
    {0}"""
    jdkBuildDir = _get_jdk_build_dir()
    if not exists(jdkBuildDir):
        # JDK9 must be bootstrapped with a JDK8
        compliance = mx.JavaCompliance('8')
        jdk8 = mx.get_jdk(compliance.exactMatch, versionDescription=compliance.value)
        cmd = ['sh', 'configure', '--with-debug-level=' + _vm.debugLevel, '--with-native-debug-symbols=external', '--disable-precompiled-headers', '--with-jvm-variants=' + _vm.jvmVariant, '--disable-warnings-as-errors', '--with-boot-jdk=' + jdk8.home]
        mx.run(cmd, cwd=_jdkSourceRoot)
    cmd = [mx.gmake_cmd(), 'CONF=' + _vm.debugLevel]
    if mx.get_opts().verbose:
        cmd.append('LOG=debug')
    cmd.extend(args)

    # --use-jdk-image implies the 'images' make target.
    if mx.get_opts().use_jdk_image and 'images' not in args:
        cmd.append('images')

    # Echo the command once when not verbose so the build invocation is visible.
    if not mx.get_opts().verbose:
        mx.log('--------------- make execution ----------------------')
        mx.log('Working directory: ' + _jdkSourceRoot)
        mx.log('Command line: ' + ' '.join(cmd))
        mx.log('-----------------------------------------------------')

    mx.run(cmd, cwd=_jdkSourceRoot)

    if 'images' in cmd:
        jdkImageDir = join(jdkBuildDir, 'images', 'jdk')

        # The OpenJDK build creates an empty cacerts file so copy one from
        # the default JDK (which is assumed to be an OracleJDK)
        srcCerts = join(mx.get_jdk(tag='default').home, 'lib', 'security', 'cacerts')
        if not exists(srcCerts):
            # Might be building with JDK8 which has cacerts under jre/
            srcCerts = join(mx.get_jdk(tag='default').home, 'jre', 'lib', 'security', 'cacerts')
        dstCerts = join(jdkImageDir, 'lib', 'security', 'cacerts')
        if srcCerts != dstCerts:
            shutil.copyfile(srcCerts, dstCerts)

        _create_jdk_bundle(jdkBuildDir, _vm.debugLevel, jdkImageDir)
def run(vmArgs, unittest, extraOption=None, extraLibs=None):
    """Run a Sulong unittest through mx_unittest with the common Sulong options."""
    if extraOption is None:
        extraOption = []
    verbose = mx.get_opts().verbose
    # In verbose mode, add '--very-verbose' before the test name and echo the command.
    tail = ['--very-verbose', unittest] if verbose else [unittest]
    command = mx_sulong.getCommonOptions(True, extraLibs) + extraOption + vmArgs + tail
    if verbose:
        print ('Running mx unittest ' + ' '.join(command))
    return mx_unittest.unittest(command)
def build(self):
    """Build jruby-core with Maven, then install the Bundler gem into the shared gem home."""
    project_dir = _suite.dir
    repo_arg, env = mavenSetup()
    mx.log("Building jruby-core with Maven")
    quiet_flags = [] if mx.get_opts().verbose else ['-q']
    maven_args = quiet_flags + ['-DskipTests', repo_arg, '-Dcreate.sources.jar', '-pl', 'core,lib']
    mx.run_maven(maven_args, cwd=project_dir, env=env)
    # Install Bundler
    gem_home = join(_suite.dir, 'lib', 'ruby', 'gems', 'shared')
    env['GEM_HOME'] = gem_home
    env['GEM_PATH'] = gem_home
    mx.run(['bin/jruby', 'bin/gem', 'install', 'bundler', '-v', '1.10.6'], cwd=project_dir, env=env)
def mavenSetup():
    """Prepare the Maven invocation environment.

    Returns a (maven_repo_arg, env) pair: the -Dmaven.repo.local argument
    pointing at the bundled or mxbuild-local repository, and a copy of the
    process environment with JAVA_HOME/bin prepended to PATH.
    """
    buildPack = join(_suite.dir, 'jruby-build-pack/maven')
    mavenDir = buildPack if isdir(buildPack) else join(_suite.dir, 'mxbuild/mvn')
    maven_repo_arg = '-Dmaven.repo.local=' + mavenDir
    env = os.environ.copy()
    if not mx.get_opts().verbose:
        env['JRUBY_BUILD_MORE_QUIET'] = 'true'
    # HACK: since the maven executable plugin does not configure the
    # java executable that is used we unfortunately need to prepend it to the PATH
    javaHome = os.getenv('JAVA_HOME')
    if javaHome:
        env["PATH"] = javaHome + '/bin' + os.pathsep + env["PATH"]
        # Fix: log the PATH actually handed to Maven (env["PATH"]); the original
        # logged os.environ["PATH"], which lacks the prepended JAVA_HOME/bin.
        mx.logv('Setting PATH to {}'.format(env["PATH"]))
    mx.run(['java', '-version'], env=env)
    return maven_repo_arg, env
def runTool(self, args, errorMsg=None):
    """Run `args` as a subprocess, suppressing its output unless --verbose is set.

    :param list args: command line to execute
    :param str errorMsg: optional message printed when the run aborts
    :return: the process exit code, or -1 if mx aborted the run (SystemExit)
    """
    f = None
    try:
        if not mx.get_opts().verbose:
            # Silence the tool's output by redirecting stdout/stderr to devnull.
            f = open(os.devnull, 'w')
            ret = mx.run(args, out=f, err=f)
        else:
            ret = mx.run(args)
    except SystemExit:
        ret = -1
        if errorMsg is None:
            print('\nError: Cannot run %s' % args)
        else:
            print('\nError: %s\n%s' % (errorMsg, ' '.join(args)))
    finally:
        # Fix: always release the devnull handle; the original only closed it
        # on the SystemExit path, leaking the file object on success.
        if f is not None:
            f.close()
    return ret
def get_jvmci_jdk(debugLevel=None):
    """
    Gets the JVMCI JDK corresponding to 'debugLevel', creating and caching it on first use.
    """
    if not debugLevel:
        debugLevel = _vm.debugLevel
    jdk = _jvmci_jdks.get(debugLevel)
    if jdk is None:
        try:
            jdk = JVMCI9JDKConfig(debugLevel)
        except mx.JDKConfigException as e:
            jdkBuildDir = _get_jdk_build_dir(debugLevel)
            msg = 'Error with the JDK built into {}:\n{}\nTry (re)building it with: mx --jdk-debug-level={} make'
            if mx.get_opts().use_jdk_image:
                msg += ' images'
            # Fix: use str(e) instead of e.message — BaseException.message is a
            # Python-2-only attribute that does not exist in Python 3.
            mx.abort(msg.format(jdkBuildDir, str(e), debugLevel))
        _jvmci_jdks[debugLevel] = jdk
    return jdk
def _runmultimake(args):
    """run the JDK make process for one or more configurations"""
    jvmVariantsDefault = ','.join(_jdkJvmVariants)
    debugLevelsDefault = ','.join(_jdkDebugLevels)
    parser = ArgumentParser(prog='mx multimake')
    parser.add_argument('--jdk-jvm-variants', '--vms', help='a comma separated list of VMs to build (default: ' + jvmVariantsDefault + ')', metavar='<args>', default=jvmVariantsDefault)
    parser.add_argument('--jdk-debug-levels', '--builds', help='a comma separated list of JDK debug levels (default: ' + debugLevelsDefault + ')', metavar='<args>', default=debugLevelsDefault)
    parser.add_argument('-n', '--no-check', action='store_true', help='omit running "java -version" after each build')
    select = parser.add_mutually_exclusive_group()
    select.add_argument('-c', '--console', action='store_true', help='send build output to console instead of log files')
    select.add_argument('-d', '--output-dir', help='directory for log files instead of current working directory', default=os.getcwd(), metavar='<dir>')

    args = parser.parse_args(args)
    jvmVariants = args.jdk_jvm_variants.split(',')
    debugLevels = [_translateLegacyDebugLevel(dl) for dl in args.jdk_debug_levels.split(',')]

    allStart = time.time()
    for jvmVariant in jvmVariants:
        for debugLevel in debugLevels:
            if not args.console:
                logFile = join(mx.ensure_dir_exists(args.output_dir), jvmVariant + '-' + debugLevel + '.log')
                start = time.time()
                mx.log('BEGIN: ' + jvmVariant + '-' + debugLevel + '\t(see: ' + logFile + ')')
                verbose = ['-v'] if mx.get_opts().verbose else []
                # Run as subprocess so that output can be directed to a file
                cmd = [sys.executable, '-u', mx.__file__] + verbose + ['--jdk-jvm-variant=' + jvmVariant, '--jdk-debug-level=' + debugLevel, 'make']
                mx.logv("executing command: " + str(cmd))
                # Fix: manage the log file with a context manager; the original
                # opened it with open(logFile, 'wb') and never closed it.
                with open(logFile, 'wb') as log:
                    subprocess.check_call(cmd, cwd=_suite.dir, stdout=log, stderr=subprocess.STDOUT)
                duration = datetime.timedelta(seconds=time.time() - start)
                mx.log('END: ' + jvmVariant + '-' + debugLevel + '\t[' + str(duration) + ']')
            else:
                with VM(jvmVariant=jvmVariant, debugLevel=debugLevel):
                    _runmake([])
            if not args.no_check:
                with VM(jvmciMode='jit'):
                    run_vm(['-XX:-BootstrapJVMCI', '-version'])
    allDuration = datetime.timedelta(seconds=time.time() - allStart)
    mx.log('TOTAL TIME: ' + '[' + str(allDuration) + ']')
def _use_multiarch(self):
    """Whether multiarch is enabled both on this project and via the mx options."""
    # Short-circuit exactly like `a and b`: a falsy project flag wins
    # without consulting the global options.
    if not self.multiarch:
        return self.multiarch
    return mx.get_opts().multiarch
def __init__(self, debugLevel):
    """Create a JDKConfig for the JVMCI JDK built at the given debug level."""
    self.debugLevel = debugLevel
    jdkBuildDir = _get_jdk_build_dir(debugLevel)
    # The image build places the JDK under images/jdk; otherwise use the exploded jdk dir.
    if mx.get_opts().use_jdk_image:
        jdkDir = join(jdkBuildDir, 'images', 'jdk')
    else:
        jdkDir = join(jdkBuildDir, 'jdk')
    mx.JDKConfig.__init__(self, jdkDir, tag=_JVMCI_JDK_TAG)
def sl(args):
    """run an SL program"""
    # Force the JVMCI JDK before delegating to the Truffle SL launcher.
    mx.get_opts().jdk = 'jvmci'
    mx_truffle.sl(args)
def __exit__(self, exc_type, exc_value, traceback):
    """Restore the global ptimeout saved by __enter__."""
    mx.get_opts().ptimeout = self.prev_ptimeout
def build(self):
    """Run the JDK make step, requesting the 'images' target when a JDK image is wanted."""
    targets = ['images'] if mx.get_opts().use_jdk_image else []
    _runmake(targets)
    # Invalidate the cached newest-output so it is recomputed after the build.
    self._newestOutput = None
def build(self):
    # Build graal-nodejs: configure (or vcbuild on Windows), make, then
    # install native-module headers; returns True if outputs changed.
    pre_ts = GraalNodeJsBuildTask._get_newest_ts(self.subject.getResults(), fatalIfMissing=False)

    # NOTE(review): mutates the process-global PATH (not a copy) to prefer
    # the bundled python2 — confirm this is intentional and not leaked to callers.
    pythonPath = join(_suite.mxDir, 'python2')
    prevPATH = os.environ['PATH']
    _setEnvVar('PATH', "%s:%s" % (pythonPath, prevPATH))

    debug_mode = hasattr(self.args, 'debug') and self.args.debug
    debug = ['--debug'] if debug_mode else []
    shared_library = ['--enable-shared-library'] if hasattr(self.args, 'sharedlibrary') and self.args.sharedlibrary else []

    if _currentOs == 'windows':
        _mxrun([join(_suite.dir, 'vcbuild.bat'), 'projgen', 'no-cctest', 'noetw', 'nosnapshot', 'java-home', _getJdkHome()] + debug + shared_library, cwd=_suite.dir, verbose=True)
    else:
        newest_config_file_ts = GraalNodeJsBuildTask._get_newest_ts(_config_files, fatalIfMissing=True)
        newest_generated_config_file_ts = GraalNodeJsBuildTask._get_newest_ts(_generated_config_files, fatalIfMissing=False)
        # Lazily generate config files only if `configure` and `configure.py` are older than the files they generate.
        # If we don't do this, the `Makefile` always considers `config.gypi` out of date, triggering a second, unnecessary configure.
        lazy_generator = ['--lazy-generator'] if newest_generated_config_file_ts.isNewerThan(newest_config_file_ts) else []
        _mxrun([join(_suite.dir, 'configure'), '--partly-static', '--without-dtrace', '--without-snapshot', '--without-node-snapshot', '--java-home', _getJdkHome()] + debug + shared_library + lazy_generator, cwd=_suite.dir, verbose=True)

    # NOTE(review): placement of the make step relative to the windows/else
    # branches is ambiguous in the flattened original — confirm against history.
    verbose = 'V={}'.format('1' if mx.get_opts().verbose else '')
    _mxrun([mx.gmake_cmd(), '-j%d' % self.parallelism, verbose], cwd=_suite.dir, verbose=True)

    # put headers for native modules into out/headers
    _setEnvVar('HEADERS_ONLY', '1')
    # NOTE(review): the devnull handle opened here is never closed.
    out = None if mx.get_opts().verbose else open(os.devnull, 'w')
    _mxrun([python_cmd(), join('tools', 'install.py'), 'install', join('out', 'headers'), '/'], out=out)

    post_ts = GraalNodeJsBuildTask._get_newest_ts(self.subject.getResults(), fatalIfMissing=True)
    mx.logv('Newest time-stamp before building: {}\nNewest time-stamp after building: {}\nHas built? {}'.format(pre_ts, post_ts, post_ts.isNewerThan(pre_ts)))
    built = post_ts.isNewerThan(pre_ts)
    if built and _currentOs == 'darwin':
        # Add the JDK library directories to the binary's rpath on macOS.
        nodePath = join(_suite.dir, 'out', 'Debug' if debug_mode else 'Release', 'node')
        _mxrun(['install_name_tool', '-add_rpath', join(_getJdkHome(), 'jre', 'lib'), '-add_rpath', join(_getJdkHome(), 'lib'), nodePath], verbose=True)
    return built
def jlink_new_jdk(jdk, dst_jdk_dir, module_dists, ignore_dists,
                  root_module_names=None,
                  missing_export_target_action='create',
                  with_source=lambda x: True,
                  vendor_info=None,
                  dedup_legal_notices=True,
                  use_upgrade_module_path=False):
    """
    Uses jlink from `jdk` to create a new JDK image in `dst_jdk_dir` with `module_dists` and
    their dependencies added to the JDK image, replacing any existing modules of the same name.

    :param JDKConfig jdk: source JDK
    :param str dst_jdk_dir: path to use for the jlink --output option
    :param list module_dists: list of distributions defining modules
    :param list ignore_dists: list of distributions that should be ignored for missing_export_target_action
    :param list root_module_names: list of strings naming the module root set for the new JDK image.
                     The named modules must either be in `module_dists` or in `jdk`. If None, then
                     the root set will be all the modules in ``module_dists` and `jdk`.
    :param str missing_export_target_action: the action to perform for a qualified export target that
                     is not present in `module_dists` and does not have a hash stored in java.base.
                     The choices are:
                       "create" - an empty module is created
                        "error" - raise an error
                           None - do nothing
    :param lambda with_source: returns True if the sources of a module distribution must be included in the new JDK
    :param dict vendor_info: values for the jlink vendor options added by JDK-8232080
    :param bool use_upgrade_module_path: if True, then instead of linking `module_dists` into the image,
                     resolve them via --upgrade-module-path at image runtime
    :return bool: False if use_upgrade_module_path == True and the existing image is up to date otherwise True
    """
    assert callable(with_source)

    if jdk.javaCompliance < '9':
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' with jlink since it is not JDK 9 or later')

    exploded_java_base_module = join(jdk.home, 'modules', 'java.base')
    if exists(exploded_java_base_module):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since it appears to be a developer build with exploded modules')
    jimage = join(jdk.home, 'lib', 'modules')
    jmods_dir = join(jdk.home, 'jmods')
    if not isfile(jimage):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jimage + ' is missing or is not an ordinary file')
    if not isdir(jmods_dir):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jmods_dir + ' is missing or is not a directory')

    # Exclude jdk.aot due to GR-10545 and JDK-8255616
    jdk_modules = {jmd.name: jmd for jmd in jdk.get_modules() if jmd.name != 'jdk.aot'}
    modules = [as_java_module(dist, jdk) for dist in module_dists]
    module_names = frozenset((m.name for m in modules))
    all_module_names = frozenset(list(jdk_modules.keys())) | module_names

    # Read hashes stored in java.base (the only module in the JDK where hashes are stored)
    hashes = _read_java_base_hashes(jdk)

    build_dir = mx.ensure_dir_exists(join(dst_jdk_dir + ".build"))

    # Directory under dst_jdk_dir for artifacts related to use_upgrade_module_path
    upgrade_dir = join(dst_jdk_dir, 'upgrade_modules_support')

    # Map from JavaModuleDescriptors to post-jlink jar location.
    synthetic_modules = OrderedDict()
    try:
        ignore_module_names = set(mx_javamodules.get_module_name(mx.dependency(ignore_dist)) for ignore_dist in ignore_dists)
        # Synthesize modules for targets of qualified exports that are not present in `modules`.
        # Without this, runtime module resolution will fail due to missing modules.
        target_requires = {}
        for jmd in modules:
            for targets in jmd.exports.values():
                for target in targets:
                    if target not in all_module_names and target not in ignore_module_names and target not in hashes:
                        target_requires.setdefault(target, set()).add(jmd.name)
        if target_requires and missing_export_target_action is not None:
            if missing_export_target_action == 'error':
                # NOTE(review): '.'.join looks like a typo for ', '.join — the
                # message would run module names together with dots. Confirm.
                mx.abort('Target(s) of qualified exports cannot be resolved: ' + '.'.join(target_requires.keys()))
            assert missing_export_target_action == 'create', 'invalid value for missing_export_target_action: ' + str(missing_export_target_action)

            for name, requires in sorted(target_requires.items()):
                module_jar = join(build_dir, name + '.jar')
                jmd = JavaModuleDescriptor(name, {}, requires={module: [] for module in requires}, uses=set(), provides={}, jarpath=module_jar)
                module_build_dir = mx.ensure_dir_exists(join(build_dir, name))
                module_info = jmd.as_module_info()
                module_info_java = join(module_build_dir, 'module-info.java')
                module_info_class = join(module_build_dir, 'module-info.class')
                dst_module_jar = join(upgrade_dir, name + '.jar')
                synthetic_modules[jmd] = dst_module_jar
                if use_upgrade_module_path and exists(dst_module_jar):
                    # Reuse a previously built synthetic module if its descriptor is unchanged.
                    with ZipFile(dst_module_jar, 'r') as zf:
                        previous_module_info = zf.read('module-info.java').decode()
                    if previous_module_info == module_info:
                        mx.logv('[Reusing synthetic module {}]'.format(name))
                        os.rename(dst_module_jar, module_jar)
                        continue
                    mx.logv('[Rebuilding synthetic module {} as module descriptor changed]'.format(name))

                with open(module_info_java, 'w') as fp:
                    fp.write(module_info)
                # Compile the synthetic module-info.java against the other modules.
                mx.run([jdk.javac, '-d', module_build_dir,
                        '--limit-modules=java.base,' + ','.join(jmd.requires.keys()),
                        '--module-path=' + os.pathsep.join((m.jarpath for m in modules)),
                        module_info_java])
                with ZipFile(module_jar, 'w') as zf:
                    zf.write(module_info_java, 'module-info.java')
                    zf.write(module_info_class, 'module-info.class')
                if exists(jmd.get_jmod_path()):
                    os.remove(jmd.get_jmod_path())
                if not use_upgrade_module_path:
                    mx.run([jdk.javac.replace('javac', 'jmod'), 'create', '--class-path=' + module_build_dir, jmd.get_jmod_path()])

            modules.extend(synthetic_modules.keys())
            module_names = frozenset((m.name for m in modules))
            all_module_names = frozenset(list(jdk_modules.keys())) | module_names

        # Edit lib/security/default.policy in java.base
        patched_java_base = _patch_default_security_policy(build_dir, jmods_dir, dst_jdk_dir)

        # Now build the new JDK image with jlink
        jlink = [jdk.javac.replace('javac', 'jlink')]

        if jdk_enables_jvmci_by_default(jdk):
            # On JDK 9+, +EnableJVMCI forces jdk.internal.vm.ci to be in the root set
            jlink += ['-J-XX:-EnableJVMCI', '-J-XX:-UseJVMCICompiler']

        jlink.append('--add-modules=' + ','.join(_get_image_root_modules(root_module_names, module_names, jdk_modules.keys(), use_upgrade_module_path)))

        module_path = patched_java_base + os.pathsep + jmods_dir
        if modules and not use_upgrade_module_path:
            module_path = os.pathsep.join((m.get_jmod_path(respect_stripping=True) for m in modules)) + os.pathsep + module_path
        jlink.append('--module-path=' + module_path)
        jlink.append('--output=' + dst_jdk_dir)

        # These options are derived from how OpenJDK runs jlink to produce the final runtime image.
        jlink.extend(['-J-XX:+UseSerialGC', '-J-Xms32M', '-J-Xmx512M', '-J-XX:TieredStopAtLevel=1'])
        jlink.append('-J-Dlink.debug=true')
        if dedup_legal_notices:
            jlink.append('--dedup-legal-notices=error-if-not-same-content')
        jlink.append('--keep-packaged-modules=' + join(dst_jdk_dir, 'jmods'))

        vm_options_path = join(upgrade_dir, 'vm_options')
        vm_options = _get_image_vm_options(jdk, use_upgrade_module_path, modules, synthetic_modules)
        if vm_options:
            jlink.append('--add-options=' + ' '.join(vm_options))

        if jdk_has_new_jlink_options(jdk) and vendor_info is not None:
            for name, value in vendor_info.items():
                jlink.append('--' + name + '=' + value)

        release_file = join(jdk.home, 'release')
        if isfile(release_file):
            jlink.append('--release-info=' + release_file)

        if exists(dst_jdk_dir):
            if use_upgrade_module_path and _vm_options_match(vm_options, vm_options_path):
                mx.logv('[Existing JDK image {} is up to date]'.format(dst_jdk_dir))
                return False
            mx.rmtree(dst_jdk_dir)

        # TODO: investigate the options below used by OpenJDK to see if they should be used:
        # --order-resources: specifies order of resources in generated lib/modules file.
        #       This is apparently not so important if a CDS archive is available.
        # --generate-jli-classes: pre-generates a set of java.lang.invoke classes.
        #       See https://github.com/openjdk/jdk/blob/master/make/GenerateLinkOptData.gmk
        mx.logv('[Creating JDK image in {}]'.format(dst_jdk_dir))
        mx.run(jlink)

        if use_upgrade_module_path:
            # Move synthetic upgrade modules into final location
            for jmd, jarpath in synthetic_modules.items():
                mx.ensure_dir_exists(dirname(jarpath))
                os.rename(jmd.jarpath, jarpath)
            # Persist VM options cooked into image to be able to skip a subsequent
            # jlink execution if the options do not change.
            with open(vm_options_path, 'w') as fp:
                fp.write(os.linesep.join(vm_options))

        # Create src.zip in new JDK image
        _copy_src_zip(jdk.home, dst_jdk_dir, modules, lambda jmd: not use_upgrade_module_path and with_source(jmd.dist))

        mx.logv('[Copying static libraries]')
        lib_directory = join(jdk.home, 'lib', 'static')
        if exists(lib_directory):
            dst_lib_directory = join(dst_jdk_dir, 'lib', 'static')
            try:
                mx.copytree(lib_directory, dst_lib_directory)
            except shutil.Error as e:
                # On AArch64, there can be a problem in the copystat part
                # of copytree which occurs after file and directory copying
                # has successfully completed. Since the metadata doesn't
                # matter in this case, just ensure that the content was copied.
                for root, _, lib_files in os.walk(lib_directory):
                    relative_root = os.path.relpath(root, dst_lib_directory)
                    for lib in lib_files:
                        src_lib_path = join(root, lib)
                        dst_lib_path = join(dst_lib_directory, relative_root, lib)
                        if not exists(dst_lib_path):
                            mx.abort('Error copying static libraries: {} missing in {}{}Original copytree error: {}'.format(
                                join(relative_root, lib), dst_lib_directory, os.linesep, e))
                        src_lib_hash = mx.sha1OfFile(src_lib_path)
                        dst_lib_hash = mx.sha1OfFile(dst_lib_path)
                        if src_lib_hash != dst_lib_hash:
                            mx.abort('Error copying static libraries: {} (hash={}) and {} (hash={}) differ{}Original copytree error: {}'.format(
                                src_lib_path, src_lib_hash, dst_lib_path, dst_lib_hash, os.linesep, e))
        # Allow older JDK versions to work
        else:
            lib_prefix = mx.add_lib_prefix('')
            lib_suffix = mx.add_static_lib_suffix('')
            lib_directory = join(jdk.home, 'lib')
            dst_lib_directory = join(dst_jdk_dir, 'lib')
            for f in os.listdir(lib_directory):
                if f.startswith(lib_prefix) and f.endswith(lib_suffix):
                    lib_path = join(lib_directory, f)
                    if isfile(lib_path):
                        shutil.copy2(lib_path, dst_lib_directory)
    finally:
        if not mx.get_opts().verbose:
            # Preserve build directory so that javac command can be re-executed
            # by cutting and pasting verbose output.
            shutil.rmtree(build_dir)

    if not use_upgrade_module_path:
        # Create CDS archive (https://openjdk.java.net/jeps/341).
        out = mx.OutputCapture()
        mx.logv('[Creating CDS shared archive]')
        if mx.run([mx.exe_suffix(join(dst_jdk_dir, 'bin', 'java')), '-Xshare:dump', '-Xmx128M', '-Xms128M'], out=out, err=out, nonZeroIsFatal=False) != 0:
            mx.log(out.data)
            mx.abort('Error generating CDS shared archive')
    else:
        # -Xshare is incompatible with --upgrade-module-path
        pass
    return True
def _parse_error(msg):
    """Log the expected parse failure (only when verbose) and raise SpecError."""
    verbose = mx.get_opts().verbose
    if verbose:
        mx.log('saw expected SpecError: ' + msg)
    raise SpecError(msg)
def __enter__(self):
    """Force verbose mode for the duration of the context, saving the prior value."""
    self.verbose = mx.get_opts().verbose
    mx.get_opts().verbose = True
    # Fix: return self so `with ... as v:` binds the manager instead of None,
    # matching the other context managers in this file (e.g. the ptimeout one).
    return self
def __exit__(self, exc_type, exc_value, traceback):
    """Restore the verbose flag captured in __enter__."""
    mx.get_opts().verbose = self.verbose
def _test():
    """
    Mx suite specific tests.
    """
    from collections import namedtuple

    # JavaCompliance tests: (spec, whether the spec denotes an exact bound)
    good_specs = [
        (2, True),
        (1.2, True),
        (11, True),
        (200, True),
        ('2', True),
        ('1.2', True),
        ('1.8', True),
        ('1.5+', False),
        ('2..4', False),
        ('1.8..9', False),
        ('2..3,4+', False),
        ('2..3,4,7+', False),
        ('2..3,4..5,7+', False),
        ('2..3,4..5,7,8,9,10,15..18,120', False),
    ]
    # Specs that must be rejected by the JavaCompliance parser
    bad_specs = [
        1,
        '1',
        '1.1',
        '1.10',
        '1.8..1.10',
        '1.10+',
        '2..1',
        '2..2',
        '1,,3',
        '1..3+',
        '1+,4..5',
        '13+ignored',
        '1..3,7..5',
        '4,7,1..3,',
        '4..5,1..3',
    ]
    range_case = namedtuple('range_case', ['spec', 'range_spec', 'should_match'])
    # Membership tests: is `spec` contained in the compliance range `range_spec`?
    range_specs = [
        range_case('1.8', '1.8', True),
        range_case('11', '11', True),
        range_case('17', '17', True),
        range_case('1.8', '1.7', False),
        range_case('1.8', '11', False),
        range_case('17', '1.8', False),
        range_case('1.8', '11..17', False),
        range_case('11', '11..17', True),
        range_case('15', '11..17', True),
        range_case('17', '11..17', True),
        range_case('19', '11..17', False),
        range_case('11..17', '11..17', True),
        range_case('13..14', '11..17', True),
        range_case('11..19', '11..17', False),
        range_case('16', '11..15,17', False),
        range_case('11..12,14..15', '11..15,17', True),
        range_case('11,12,13,14,15,16,17', '11..17', True),
        range_case('11+', '11..17', False),
    ]
    for spec, exact in good_specs:
        p = mx.JavaCompliance(spec)
        assert p._is_exact_bound() is exact, p

        # Just ensure these methods execute without exception
        p.as_version_check()
        p._values(stop=20)
        hash(p)

        if mx.get_opts().verbose:
            if isinstance(spec, str):
                spec = '"' + spec + '"'
            mx.log('{}: str="{}", repr="{}", hash={}'.format(spec, str(p), repr(p), hash(p)))
    for spec in bad_specs:
        # Local exception type so we only catch failures raised via _parse_error.
        class SpecError(Exception):
            pass

        def _parse_error(msg):
            if mx.get_opts().verbose:
                mx.log('saw expected SpecError: ' + msg)
            raise SpecError(msg)

        try:
            mx.JavaCompliance(spec, parse_error=_parse_error)
            mx.abort('expected SpecError while parsing "{}"'.format(spec))
        except SpecError:
            pass
    for spec, range_spec, should_match in range_specs:
        match = spec in mx.JavaCompliance(range_spec)
        assert match == should_match, '"{}" in "{}" should returns {}'.format(spec, range_spec, should_match)
def _runmultimake(args):
    """run the JDK make process for one or more configurations"""
    jvmVariantsDefault = ','.join(_jdkJvmVariants)
    debugLevelsDefault = ','.join(_jdkDebugLevels)
    parser = ArgumentParser(prog='mx multimake')
    parser.add_argument(
        '--jdk-jvm-variants', '--vms',
        help='a comma separated list of VMs to build (default: ' + jvmVariantsDefault + ')',
        metavar='<args>', default=jvmVariantsDefault)
    parser.add_argument(
        '--jdk-debug-levels', '--builds',
        help='a comma separated list of JDK debug levels (default: ' + debugLevelsDefault + ')',
        metavar='<args>', default=debugLevelsDefault)
    parser.add_argument('-n', '--no-check', action='store_true',
                        help='omit running "java -version" after each build')
    select = parser.add_mutually_exclusive_group()
    select.add_argument(
        '-c', '--console', action='store_true',
        help='send build output to console instead of log files')
    select.add_argument(
        '-d', '--output-dir',
        help='directory for log files instead of current working directory',
        default=os.getcwd(), metavar='<dir>')

    args = parser.parse_args(args)
    jvmVariants = args.jdk_jvm_variants.split(',')
    debugLevels = [_translateLegacyDebugLevel(dl) for dl in args.jdk_debug_levels.split(',')]

    allStart = time.time()
    for jvmVariant in jvmVariants:
        for debugLevel in debugLevels:
            if not args.console:
                logFile = join(mx.ensure_dir_exists(args.output_dir), jvmVariant + '-' + debugLevel + '.log')
                start = time.time()
                mx.log('BEGIN: ' + jvmVariant + '-' + debugLevel + '\t(see: ' + logFile + ')')
                verbose = ['-v'] if mx.get_opts().verbose else []
                # Run as subprocess so that output can be directed to a file
                cmd = [sys.executable, '-u', mx.__file__] + verbose + [
                    '--jdk-jvm-variant=' + jvmVariant,
                    '--jdk-debug-level=' + debugLevel, 'make'
                ]
                mx.logv("executing command: " + str(cmd))
                # Fix: manage the log file with a context manager; the original
                # opened it with open(logFile, 'wb') and never closed it.
                with open(logFile, 'wb') as log:
                    subprocess.check_call(cmd, cwd=_suite.dir, stdout=log, stderr=subprocess.STDOUT)
                duration = datetime.timedelta(seconds=time.time() - start)
                mx.log('END: ' + jvmVariant + '-' + debugLevel + '\t[' + str(duration) + ']')
            else:
                with VM(jvmVariant=jvmVariant, debugLevel=debugLevel):
                    _runmake([])
            if not args.no_check:
                with VM(jvmciMode='jit'):
                    run_vm(['-XX:-BootstrapJVMCI', '-version'])
    allDuration = datetime.timedelta(seconds=time.time() - allStart)
    mx.log('TOTAL TIME: ' + '[' + str(allDuration) + ']')
def _test(): """ Mx suite specific tests. """ # JavaCompliance tests good_specs = [ (2, True), (1.2, True), (11, True), (200, True), ('2', True), ('1.2', True), ('1.8', True), ('1.5+', False), ('2..4', False), ('1.8..9', False), ('2..3,4+', False), ('2..3,4,7+', False), ('2..3,4..5,7+', False), ('2..3,4..5,7,8,9,10,15..18,120', False), ] bad_specs = [ 1, '1', '1.1', '1.10', '1.8..1.10', '1.10+', '2..1', '2..2', '1,,3', '1..3+', '1+,4..5', '13+ignored', '1..3,7..5', '4,7,1..3,', '4..5,1..3', ] for spec, exact in good_specs: p = mx.JavaCompliance(spec) assert p._is_exact_bound() is exact, p # Just ensure these methods execute without exception p.as_version_check() p._values(stop=20) hash(p) if mx.get_opts().verbose: if isinstance(spec, str): spec = '"' + spec + '"' mx.log('{}: str="{}", repr="{}", hash={}'.format( spec, str(p), repr(p), hash(p))) for spec in bad_specs: class SpecError(Exception): pass def _parse_error(msg): if mx.get_opts().verbose: mx.log('saw expected SpecError: ' + msg) raise SpecError(msg) try: mx.JavaCompliance(spec, parse_error=_parse_error) mx.abort('expected SpecError while parsing "{}"'.format(spec)) except SpecError: pass
def __enter__(self):
    """Install this context's ptimeout globally, remembering the previous value."""
    opts = mx.get_opts()
    self.prev_ptimeout = opts.ptimeout
    opts.ptimeout = self.ptimeout
    return self
def build(self):
    """Configure and build GraalNode.js, returning True if anything was (re)built.

    The "has built" decision compares the newest result-file timestamp before
    and after the build. On Windows the build uses ninja (with the devkit
    toolchain injected into the environment); elsewhere it uses gmake.
    """
    pre_ts = GraalNodeJsBuildTask._get_newest_ts(self.subject.getResults(), fatalIfMissing=False)

    build_env = os.environ.copy()
    # Prepend the suite-provided python2 shim so node's gyp tooling finds it first.
    _setEnvVar('PATH', '%s%s%s' % (join(_suite.mxDir, 'python2'), os.pathsep, build_env['PATH']), build_env)

    debug = ['--debug'] if self._debug_mode else []
    shared_library = ['--enable-shared-library'] if hasattr(self.args, 'sharedlibrary') and self.args.sharedlibrary else []

    newest_config_file_ts = GraalNodeJsBuildTask._get_newest_ts(_config_files, fatalIfMissing=True)
    newest_generated_config_file_ts = GraalNodeJsBuildTask._get_newest_ts(_generated_config_files, fatalIfMissing=False)
    # Lazily generate config files only if `configure` and `configure.py` are older than the files they generate.
    # If we don't do this, the `Makefile` always considers `config.gypi` out of date, triggering a second, unnecessary configure.
    lazy_generator = ['--lazy-generator'] if newest_generated_config_file_ts.isNewerThan(newest_config_file_ts) else []

    if _currentOs == 'windows':
        devkit_root = build_env.get('DEVKIT_ROOT')
        if devkit_root is not None:
            # Point gyp/MSVS at the devkit toolchain and SDK.
            _setEnvVar('GYP_MSVS_OVERRIDE_PATH', devkit_root, build_env)
            _setEnvVar('GYP_MSVS_VERSION', build_env.get('DEVKIT_VERSION'), build_env)
            _setEnvVar('PATH', '%s%s%s' % (join(devkit_root, 'VC', 'bin', 'x64'), os.pathsep, build_env['PATH']), build_env)
            _setEnvVar('WINDOWSSDKDIR', join(devkit_root, '10'), build_env)
            _setEnvVar('INCLUDE', r'{devkit}\VC\include;{devkit}\VC\atlmfc\include;{devkit}\10\include\shared;{devkit}\10\include\ucrt;{devkit}\10\include\um;{devkit}\10\include\winrt;{prev}'.format(devkit=devkit_root, prev=build_env['INCLUDE']), build_env)
            _setEnvVar('LIB', r'{devkit}\VC\lib\x64;{devkit}\VC\atlmfc\lib\x64;{devkit}\10\lib\x64;{prev}'.format(devkit=devkit_root, prev=build_env['LIB']), build_env)
        # NASM and NINJA come from mx-managed libraries.
        _setEnvVar('PATH', os.pathsep.join([build_env['PATH']] + [mx.library(lib_name).get_path(True) for lib_name in ('NASM', 'NINJA')]), build_env)
        extra_flags = ['--ninja', '--dest-cpu=x64', '--without-etw', '--without-snapshot']
    else:
        extra_flags = []

    _mxrun(python_cmd() + [
        join(_suite.dir, 'configure'),
        '--partly-static', '--without-dtrace', '--without-snapshot', '--without-node-snapshot',
        '--java-home', _java_home()
    ] + debug + shared_library + lazy_generator + extra_flags,
           cwd=_suite.dir, verbose=True, env=build_env)

    if _currentOs == 'windows':
        verbose = ['-v'] if mx.get_opts().verbose else []
        # The custom env is not used to resolve the location of the executable
        _mxrun([join(mx.library('NINJA').get_path(True), 'ninja.exe')] + verbose + ['-j%d' % self.parallelism, '-C', self._build_dir], env=build_env)
    else:
        verbose = 'V={}'.format('1' if mx.get_opts().verbose else '')
        _mxrun([mx.gmake_cmd(), '-j%d' % self.parallelism, verbose], cwd=_suite.dir, verbose=True, env=build_env)

    # put headers for native modules into out/headers
    _setEnvVar('HEADERS_ONLY', '1', build_env)
    install_cmd = python_cmd() + [join('tools', 'install.py'), 'install', join('out', 'headers'), '/']
    if mx.get_opts().verbose:
        _mxrun(install_cmd, env=build_env)
    else:
        # Discard the install output; the `with` block closes the devnull
        # handle (the original opened it and never closed it).
        with open(os.devnull, 'w') as devnull:
            _mxrun(install_cmd, out=devnull, env=build_env)

    post_ts = GraalNodeJsBuildTask._get_newest_ts(self.subject.getResults(), fatalIfMissing=True)
    mx.logv('Newest time-stamp before building: {}\nNewest time-stamp after building: {}\nHas built? {}'.format(pre_ts, post_ts, post_ts.isNewerThan(pre_ts)))
    built = post_ts.isNewerThan(pre_ts)
    if built and _currentOs == 'darwin':
        # Let the freshly built binary locate the JVM libraries at run time.
        nodePath = join(self._build_dir, 'node')
        _mxrun(['install_name_tool', '-add_rpath', join(_java_home(), 'jre', 'lib'), '-add_rpath', join(_java_home(), 'lib'), nodePath], verbose=True, env=build_env)
    return built
def jlink_new_jdk(jdk, dst_jdk_dir, module_dists, root_module_names=None,
                  missing_export_target_action='create',
                  with_source=lambda x: True,
                  vendor_info=None,
                  dedup_legal_notices=True):
    """
    Uses jlink from `jdk` to create a new JDK image in `dst_jdk_dir` with `module_dists` and
    their dependencies added to the JDK image, replacing any existing modules of the same name.

    :param JDKConfig jdk: source JDK
    :param str dst_jdk_dir: path to use for the jlink --output option
    :param list module_dists: list of distributions defining modules
    :param list root_module_names: list of strings naming the module root set for the new JDK image.
                     The named modules must either be in `module_dists` or in `jdk`. If None, then
                     the root set will be all the modules in `module_dists` and `jdk`.
    :param str missing_export_target_action: the action to perform for a qualified export target that
                     is not present in `module_dists` and does not have a hash stored in java.base.
                     The choices are:
                       "create" - an empty module is created
                        "error" - raise an error
                           None - do nothing
    :param lambda with_source: returns True if the sources of a module distribution must be included in the new JDK
    :param dict vendor_info: values for the jlink vendor options added by JDK-8232080
    :param bool dedup_legal_notices: if True, pass --dedup-legal-notices=error-if-not-same-content to jlink
    """
    assert callable(with_source)

    if jdk.javaCompliance < '9':
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' with jlink since it is not JDK 9 or later')

    exploded_java_base_module = join(jdk.home, 'modules', 'java.base')
    if exists(exploded_java_base_module):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since it appears to be a developer build with exploded modules')

    jimage = join(jdk.home, 'lib', 'modules')
    jmods_dir = join(jdk.home, 'jmods')
    if not isfile(jimage):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jimage + ' is missing or is not an ordinary file')
    if not isdir(jmods_dir):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jmods_dir + ' is missing or is not a directory')

    jdk_modules = {jmd.name: jmd for jmd in jdk.get_modules()}
    modules = [as_java_module(dist, jdk) for dist in module_dists]
    all_module_names = frozenset(list(jdk_modules.keys()) + [m.name for m in modules])

    # Read hashes stored in java.base (the only module in the JDK where hashes are stored)
    out = mx.LinesOutputCapture()
    mx.run([jdk.exe_path('jmod'), 'describe', jdk_modules['java.base'].get_jmod_path()], out=out)
    lines = out.lines
    hashes = {}
    for line in lines:
        if line.startswith('hashes'):
            parts = line.split()
            assert len(parts) == 4, 'expected hashes line to have 4 fields, got {} fields: {}'.format(len(parts), line)
            _, module_name, algorithm, hash_value = parts
            hashes[module_name] = (algorithm, hash_value)

    build_dir = mx.ensure_dir_exists(join(dst_jdk_dir + ".build"))
    try:
        # Handle targets of qualified exports that are not present in `modules`
        target_requires = {}
        for jmd in modules:
            for targets in jmd.exports.values():
                for target in targets:
                    if target not in all_module_names and target not in hashes:
                        target_requires.setdefault(target, set()).add(jmd.name)
        if target_requires and missing_export_target_action is not None:
            if missing_export_target_action == 'error':
                # Join with ', ' (the original used '.', which mashed the
                # module names into one unreadable dotted string).
                mx.abort('Target(s) of qualified exports cannot be resolved: ' + ', '.join(target_requires.keys()))
            assert missing_export_target_action == 'create', 'invalid value for missing_export_target_action: ' + str(missing_export_target_action)

            # Synthesize an empty module for each unresolved export target so
            # that jlink can resolve the qualified exports.
            extra_modules = []
            for name, requires in target_requires.items():
                module_jar = join(build_dir, name + '.jar')
                jmd = JavaModuleDescriptor(name, {}, requires={module: [] for module in requires}, uses=set(), provides={}, jarpath=module_jar)
                extra_modules.append(jmd)
                module_build_dir = mx.ensure_dir_exists(join(build_dir, name))
                module_info_java = join(module_build_dir, 'module-info.java')
                module_info_class = join(module_build_dir, 'module-info.class')
                with open(module_info_java, 'w') as fp:
                    print(jmd.as_module_info(), file=fp)
                mx.run([jdk.javac, '-d', module_build_dir,
                        '--limit-modules=java.base,' + ','.join(jmd.requires.keys()),
                        '--module-path=' + os.pathsep.join((m.jarpath for m in modules)),
                        module_info_java])
                with ZipFile(module_jar, 'w') as zf:
                    zf.write(module_info_class, basename(module_info_class))
                if exists(jmd.get_jmod_path()):
                    os.remove(jmd.get_jmod_path())
                mx.run([jdk.javac.replace('javac', 'jmod'), 'create', '--class-path=' + module_build_dir, jmd.get_jmod_path()])

            modules.extend(extra_modules)
            all_module_names = frozenset(list(jdk_modules.keys()) + [m.name for m in modules])

        # Extract src.zip from source JDK
        jdk_src_zip = join(jdk.home, 'lib', 'src.zip')
        dst_src_zip_contents = {}
        if isfile(jdk_src_zip):
            mx.logv('[Extracting ' + jdk_src_zip + ']')
            with ZipFile(jdk_src_zip, 'r') as zf:
                for name in zf.namelist():
                    if not name.endswith('/'):
                        dst_src_zip_contents[name] = zf.read(name)
        else:
            mx.warn("'{}' does not exist or is not a file".format(jdk_src_zip))

        # Edit lib/security/default.policy in java.base: rewrite the jmod with
        # extra grants for Graal/Truffle code bases appended to the policy.
        patched_java_base = join(build_dir, 'java.base.jmod')
        with open(join(jmods_dir, 'java.base.jmod'), 'rb') as src_f, open(patched_java_base, 'wb') as dst_f:
            jmod_header = src_f.read(4)
            if len(jmod_header) != 4 or jmod_header != b'JM\x01\x00':
                raise mx.abort("Unexpected jmod header: " + b2a_hex(jmod_header).decode('ascii'))
            # A jmod file is a 4-byte magic header followed by a zip archive.
            dst_f.write(jmod_header)
            policy_result = 'not found'
            with ZipFile(src_f, 'r') as src_zip, ZipFile(dst_f, 'w', src_zip.compression) as dst_zip:
                for i in src_zip.infolist():
                    if i.filename[-1] == '/':
                        continue
                    src_member = src_zip.read(i)
                    if i.filename == 'lib/security/default.policy':
                        policy_result = 'unmodified'
                        if 'grant codeBase "jrt:/com.oracle.graal.graal_enterprise"'.encode('utf-8') not in src_member:
                            policy_result = 'modified'
                            src_member += """
grant codeBase "jrt:/com.oracle.graal.graal_enterprise" {
    permission java.security.AllPermission;
};
""".encode('utf-8')
                        if 'grant codeBase "jrt:/org.graalvm.truffle"'.encode('utf-8') not in src_member:
                            policy_result = 'modified'
                            src_member += """
grant codeBase "jrt:/org.graalvm.truffle" {
    permission java.security.AllPermission;
};

grant codeBase "jrt:/org.graalvm.sdk" {
    permission java.security.AllPermission;
};

grant codeBase "jrt:/org.graalvm.locator" {
  permission java.io.FilePermission "<<ALL FILES>>", "read";
  permission java.util.PropertyPermission "*", "read,write";
  permission java.lang.RuntimePermission "createClassLoader";
  permission java.lang.RuntimePermission "getClassLoader";
  permission java.lang.RuntimePermission "getenv.*";
};

grant codeBase "file:${java.home}/languages/-" {
    permission java.security.AllPermission;
};
""".encode('utf-8')
                    dst_zip.writestr(i, src_member)
            if policy_result == 'not found':
                raise mx.abort("Couldn't find `lib/security/default.policy` in " + join(jmods_dir, 'java.base.jmod'))

        for jmd in modules:
            # Remove existing sources for all the modules that we include
            # NOTE(review): startswith(jmd.name) also matches modules whose
            # name is a prefix of another (e.g. 'foo' vs 'foo.bar') — confirm
            # this over-matching is acceptable before tightening.
            dst_src_zip_contents = {key: dst_src_zip_contents[key] for key in dst_src_zip_contents if not key.startswith(jmd.name)}

            if with_source(jmd.dist):
                # Add the sources that we can share.
                # Extract module sources
                jmd_src_zip = jmd.jarpath[0:-len('.jar')] + '.src.zip'
                if isfile(jmd_src_zip):
                    mx.logv('[Extracting ' + jmd_src_zip + ']')
                    with ZipFile(jmd_src_zip, 'r') as zf:
                        for name in zf.namelist():
                            if not name.endswith('/'):
                                dst_src_zip_contents[jmd.name + '/' + name] = zf.read(name)

                # Add module-info.java to sources
                dst_src_zip_contents[jmd.name + '/module-info.java'] = jmd.as_module_info(extras_as_comments=False)

        # Now build the new JDK image with jlink
        jlink = [jdk.javac.replace('javac', 'jlink')]

        if jdk_enables_jvmci_by_default(jdk):
            # On JDK 9+, +EnableJVMCI forces jdk.internal.vm.ci to be in the root set
            jlink += ['-J-XX:-EnableJVMCI', '-J-XX:-UseJVMCICompiler']
        if root_module_names is not None:
            missing = frozenset(root_module_names) - all_module_names
            if missing:
                mx.abort('Invalid module(s): {}.\nAvailable modules: {}'.format(','.join(missing), ','.join(sorted(all_module_names))))
            jlink.append('--add-modules=' + ','.join(root_module_names))
        else:
            jlink.append('--add-modules=' + ','.join(sorted(all_module_names)))

        # The patched java.base and our modules must shadow the stock jmods.
        module_path = patched_java_base + os.pathsep + jmods_dir
        if modules:
            module_path = os.pathsep.join((m.get_jmod_path(respect_stripping=True) for m in modules)) + os.pathsep + module_path
        jlink.append('--module-path=' + module_path)
        jlink.append('--output=' + dst_jdk_dir)

        # These options are derived from how OpenJDK runs jlink to produce the final runtime image.
        jlink.extend(['-J-XX:+UseSerialGC', '-J-Xms32M', '-J-Xmx512M', '-J-XX:TieredStopAtLevel=1'])
        jlink.append('-J-Dlink.debug=true')
        if dedup_legal_notices:
            jlink.append('--dedup-legal-notices=error-if-not-same-content')
        jlink.append('--keep-packaged-modules=' + join(dst_jdk_dir, 'jmods'))

        if jdk_has_new_jlink_options(jdk):
            if jdk_omits_warning_for_jlink_set_ThreadPriorityPolicy(jdk):
                thread_priority_policy_option = ' -XX:ThreadPriorityPolicy=1'
            else:
                mx.logv('[Creating JDK without -XX:ThreadPriorityPolicy=1]')
                thread_priority_policy_option = ''

            if jdk_supports_enablejvmciproduct(jdk):
                if any((m.name == 'jdk.internal.vm.compiler' for m in modules)):
                    jlink.append('--add-options=-XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCIProduct -XX:-UnlockExperimentalVMOptions' + thread_priority_policy_option)
                else:
                    # Don't default to using JVMCI as JIT unless Graal is being updated in the image.
                    # This avoids unexpected issues with using the out-of-date Graal compiler in
                    # the JDK itself.
                    jlink.append('--add-options=-XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCIProduct -XX:-UseJVMCICompiler -XX:-UnlockExperimentalVMOptions' + thread_priority_policy_option)
            else:
                mx.logv('[Creating JDK without -XX:+EnableJVMCIProduct]')
                if thread_priority_policy_option:
                    jlink.append('--add-options=' + thread_priority_policy_option.strip())
        if vendor_info is not None:
            for name, value in vendor_info.items():
                jlink.append('--' + name + '=' + value)

        release_file = join(jdk.home, 'release')
        if isfile(release_file):
            jlink.append('--release-info=' + release_file)

        # TODO: investigate the options below used by OpenJDK to see if they should be used:
        # --order-resources: specifies order of resources in generated lib/modules file.
        #       This is apparently not so important if a CDS archive is available.
        # --generate-jli-classes: pre-generates a set of java.lang.invoke classes.
        #       See https://github.com/openjdk/jdk/blob/master/make/GenerateLinkOptData.gmk
        mx.logv('[Creating JDK image in {}]'.format(dst_jdk_dir))
        mx.run(jlink)

        dst_src_zip = join(dst_jdk_dir, 'lib', 'src.zip')
        mx.logv('[Creating ' + dst_src_zip + ']')
        with ZipFile(dst_src_zip, 'w', compression=ZIP_DEFLATED, allowZip64=True) as zf:
            for name, contents in sorted(dst_src_zip_contents.items()):
                zf.writestr(name, contents)

        mx.logv('[Copying static libraries]')
        lib_directory = join(jdk.home, 'lib', 'static')
        if exists(lib_directory):
            dst_lib_directory = join(dst_jdk_dir, 'lib', 'static')
            try:
                mx.copytree(lib_directory, dst_lib_directory)
            except shutil.Error as e:
                # On AArch64, there can be a problem in the copystat part
                # of copytree which occurs after file and directory copying
                # has successfully completed. Since the metadata doesn't
                # matter in this case, just ensure that the content was copied.
                for root, _, lib_files in os.walk(lib_directory):
                    # Relative to the walked source tree (the original
                    # incorrectly computed relpath against the destination).
                    relative_root = os.path.relpath(root, lib_directory)
                    for lib in lib_files:
                        src_lib_path = join(root, lib)
                        dst_lib_path = join(dst_lib_directory, relative_root, lib)
                        if not exists(dst_lib_path):
                            mx.abort('Error copying static libraries: {} missing in {}{}Original copytree error: {}'.format(
                                join(relative_root, lib), dst_lib_directory, os.linesep, e))
                        src_lib_hash = mx.sha1OfFile(src_lib_path)
                        dst_lib_hash = mx.sha1OfFile(dst_lib_path)
                        if src_lib_hash != dst_lib_hash:
                            mx.abort('Error copying static libraries: {} (hash={}) and {} (hash={}) differ{}Original copytree error: {}'.format(
                                src_lib_path, src_lib_hash,
                                dst_lib_path, dst_lib_hash,
                                os.linesep, e))
        # Allow older JDK versions to work
        else:
            lib_prefix = mx.add_lib_prefix('')
            lib_suffix = mx.add_static_lib_suffix('')
            lib_directory = join(jdk.home, 'lib')
            dst_lib_directory = join(dst_jdk_dir, 'lib')
            for f in os.listdir(lib_directory):
                if f.startswith(lib_prefix) and f.endswith(lib_suffix):
                    lib_path = join(lib_directory, f)
                    if isfile(lib_path):
                        shutil.copy2(lib_path, dst_lib_directory)
    finally:
        if not mx.get_opts().verbose:
            # Preserve build directory so that javac command can be re-executed
            # by cutting and pasting verbose output.
            shutil.rmtree(build_dir)

    # Create CDS archive (https://openjdk.java.net/jeps/341).
    out = mx.OutputCapture()
    mx.logv('[Creating CDS shared archive]')
    if mx.run([mx.exe_suffix(join(dst_jdk_dir, 'bin', 'java')), '-Xshare:dump', '-Xmx128M', '-Xms128M'], out=out, err=out, nonZeroIsFatal=False) != 0:
        mx.log(out.data)
        mx.abort('Error generating CDS shared archive')
def jlink_new_jdk(jdk, dst_jdk_dir, module_dists, root_module_names=None,
                  missing_export_target_action='create',
                  with_source=lambda x: True):
    """
    Uses jlink from `jdk` to create a new JDK image in `dst_jdk_dir` with `module_dists` and
    their dependencies added to the JDK image, replacing any existing modules of the same name.

    :param JDKConfig jdk: source JDK
    :param str dst_jdk_dir: path to use for the jlink --output option
    :param list module_dists: list of distributions defining modules
    :param list root_module_names: list of strings naming the module root set for the new JDK image.
                     The named modules must either be in `module_dists` or in `jdk`. If None, then
                     the root set will be all the modules in `module_dists` and `jdk`.
    :param str missing_export_target_action: the action to perform for a qualified export target that
                     is not present in `module_dists` and does not have a hash stored in java.base.
                     The choices are:
                       "create" - an empty module is created
                        "error" - raise an error
                           None - do nothing
    :param lambda with_source: returns True if the sources of a module distribution must be included in the new JDK
    """
    assert callable(with_source)

    if jdk.javaCompliance < '9':
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' with jlink since it is not JDK 9 or later')

    exploded_java_base_module = join(jdk.home, 'modules', 'java.base')
    if exists(exploded_java_base_module):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since it appears to be a developer build with exploded modules')

    jimage = join(jdk.home, 'lib', 'modules')
    jmods_dir = join(jdk.home, 'jmods')
    if not isfile(jimage):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jimage + ' is missing or is not an ordinary file')
    if not isdir(jmods_dir):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' + jmods_dir + ' is missing or is not a directory')

    jdk_modules = {jmd.name: jmd for jmd in jdk.get_modules()}
    modules = [as_java_module(dist, jdk) for dist in module_dists]
    all_module_names = frozenset(list(jdk_modules.keys()) + [m.name for m in modules])

    # Read hashes stored in java.base (the only module in the JDK where hashes are stored)
    out = mx.LinesOutputCapture()
    mx.run([jdk.exe_path('jmod'), 'describe', jdk_modules['java.base'].get_jmod_path()], out=out)
    lines = out.lines
    hashes = {}
    for line in lines:
        if line.startswith('hashes'):
            parts = line.split()
            assert len(parts) == 4, 'expected hashes line to have 4 fields, got {} fields: {}'.format(len(parts), line)
            _, module_name, algorithm, hash_value = parts
            hashes[module_name] = (algorithm, hash_value)

    build_dir = mx.ensure_dir_exists(join(dst_jdk_dir + ".build"))
    try:
        # Handle targets of qualified exports that are not present in `modules`
        target_requires = {}
        for jmd in modules:
            for targets in jmd.exports.values():
                for target in targets:
                    if target not in all_module_names and target not in hashes:
                        target_requires.setdefault(target, set()).add(jmd.name)
        if target_requires and missing_export_target_action is not None:
            if missing_export_target_action == 'error':
                # Join with ', ' (the original used '.', which mashed the
                # module names into one unreadable dotted string).
                mx.abort('Target(s) of qualified exports cannot be resolved: ' + ', '.join(target_requires.keys()))
            assert missing_export_target_action == 'create', 'invalid value for missing_export_target_action: ' + str(missing_export_target_action)

            # Synthesize an empty module for each unresolved export target so
            # that jlink can resolve the qualified exports.
            extra_modules = []
            for name, requires in target_requires.items():
                module_jar = join(build_dir, name + '.jar')
                jmd = JavaModuleDescriptor(name, {}, requires={module: [] for module in requires}, uses=set(), provides={}, jarpath=module_jar)
                extra_modules.append(jmd)
                module_build_dir = mx.ensure_dir_exists(join(build_dir, name))
                module_info_java = join(module_build_dir, 'module-info.java')
                module_info_class = join(module_build_dir, 'module-info.class')
                with open(module_info_java, 'w') as fp:
                    print(jmd.as_module_info(), file=fp)
                mx.run([jdk.javac, '-d', module_build_dir,
                        '--limit-modules=java.base,' + ','.join(jmd.requires.keys()),
                        '--module-path=' + os.pathsep.join((m.jarpath for m in modules)),
                        module_info_java])
                with ZipFile(module_jar, 'w') as zf:
                    zf.write(module_info_class, basename(module_info_class))
                if exists(jmd.get_jmod_path()):
                    os.remove(jmd.get_jmod_path())
                mx.run([jdk.javac.replace('javac', 'jmod'), 'create', '--class-path=' + module_build_dir, jmd.get_jmod_path()])

            modules.extend(extra_modules)
            all_module_names = frozenset(list(jdk_modules.keys()) + [m.name for m in modules])

        # Extract src.zip from source JDK
        jdk_src_zip = join(jdk.home, 'lib', 'src.zip')
        dst_src_zip_contents = {}
        if isfile(jdk_src_zip):
            mx.logv('[Extracting ' + jdk_src_zip + ']')
            with ZipFile(jdk_src_zip, 'r') as zf:
                for name in zf.namelist():
                    if not name.endswith('/'):
                        dst_src_zip_contents[name] = zf.read(name)
        else:
            mx.warn("'{}' does not exist or is not a file".format(jdk_src_zip))

        for jmd in modules:
            # Remove existing sources for all the modules that we include
            # NOTE(review): startswith(jmd.name) also matches modules whose
            # name is a prefix of another (e.g. 'foo' vs 'foo.bar') — confirm
            # this over-matching is acceptable before tightening.
            dst_src_zip_contents = {key: dst_src_zip_contents[key] for key in dst_src_zip_contents if not key.startswith(jmd.name)}

            if with_source(jmd.dist):
                # Add the sources that we can share.
                # Extract module sources
                jmd_src_zip = jmd.jarpath[0:-len('.jar')] + '.src.zip'
                if isfile(jmd_src_zip):
                    mx.logv('[Extracting ' + jmd_src_zip + ']')
                    with ZipFile(jmd_src_zip, 'r') as zf:
                        for name in zf.namelist():
                            if not name.endswith('/'):
                                dst_src_zip_contents[jmd.name + '/' + name] = zf.read(name)

                # Add module-info.java to sources
                dst_src_zip_contents[jmd.name + '/module-info.java'] = jmd.as_module_info(extras_as_comments=False)

        # Now build the new JDK image with jlink
        jlink = [jdk.javac.replace('javac', 'jlink')]

        if jdk_enables_jvmci_by_default(jdk):
            # On JDK 9+, +EnableJVMCI forces jdk.internal.vm.ci to be in the root set
            jlink.append('-J-XX:-EnableJVMCI')
        if root_module_names is not None:
            missing = frozenset(root_module_names) - all_module_names
            if missing:
                mx.abort('Invalid module(s): {}.\nAvailable modules: {}'.format(','.join(missing), ','.join(sorted(all_module_names))))
            jlink.append('--add-modules=' + ','.join(root_module_names))
        else:
            jlink.append('--add-modules=' + ','.join(sorted(all_module_names)))

        # Our modules must shadow any same-named modules from the stock jmods.
        module_path = jmods_dir
        if modules:
            module_path = os.pathsep.join((m.get_jmod_path(respect_stripping=True) for m in modules)) + os.pathsep + module_path
        jlink.append('--module-path=' + module_path)
        jlink.append('--output=' + dst_jdk_dir)

        # These options are inspired by how OpenJDK runs jlink to produce the final runtime image.
        jlink.extend(['-J-XX:+UseSerialGC', '-J-Xms32M', '-J-Xmx512M', '-J-XX:TieredStopAtLevel=1'])
        jlink.append('-J-Dlink.debug=true')
        jlink.append('--dedup-legal-notices=error-if-not-same-content')
        jlink.append('--keep-packaged-modules=' + join(dst_jdk_dir, 'jmods'))

        # TODO: investigate the options below used by OpenJDK to see if they should be used:
        # --release-info: this allow extra properties to be written to the <jdk>/release file
        # --order-resources: specifies order of resources in generated lib/modules file.
        #       This is apparently not so important if a CDS archive is available.
        # --generate-jli-classes: pre-generates a set of java.lang.invoke classes.
        #       See https://github.com/openjdk/jdk/blob/master/make/GenerateLinkOptData.gmk
        mx.logv('[Creating JDK image]')
        mx.run(jlink)

        dst_src_zip = join(dst_jdk_dir, 'lib', 'src.zip')
        mx.logv('[Creating ' + dst_src_zip + ']')
        with ZipFile(dst_src_zip, 'w', compression=ZIP_DEFLATED, allowZip64=True) as zf:
            for name, contents in sorted(dst_src_zip_contents.items()):
                zf.writestr(name, contents)

        mx.logv('[Copying static libraries]')
        lib_prefix = mx.add_lib_prefix('')
        lib_suffix = '.lib' if mx.is_windows() else '.a'
        lib_directory = join(jdk.home, 'lib')
        dst_lib_directory = join(dst_jdk_dir, 'lib')
        for f in os.listdir(lib_directory):
            if f.startswith(lib_prefix) and f.endswith(lib_suffix):
                lib_path = join(lib_directory, f)
                if isfile(lib_path):
                    shutil.copy2(lib_path, dst_lib_directory)

        # Build the list of modules whose classes might have annotations
        # to be processed by native-image (GR-15192).
        with open(join(dst_jdk_dir, 'lib', 'native-image-modules.list'), 'w') as fp:
            print('# Modules whose classes might have annotations processed by native-image', file=fp)
            for m in modules:
                print(m.name, file=fp)
    finally:
        if not mx.get_opts().verbose:
            # Preserve build directory so that javac command can be re-executed
            # by cutting and pasting verbose output.
            shutil.rmtree(build_dir)

    # Create CDS archive (https://openjdk.java.net/jeps/341).
    out = mx.OutputCapture()
    mx.logv('[Creating CDS shared archive]')
    if mx.run([mx.exe_suffix(join(dst_jdk_dir, 'bin', 'java')), '-Xshare:dump', '-Xmx128M', '-Xms128M'], out=out, err=out, nonZeroIsFatal=False) != 0:
        mx.log(out.data)
        mx.abort('Error generating CDS shared archive')
def build(self):
    """Configure and build GraalNode.js, returning True if anything was (re)built.

    "Has built" is decided by comparing the newest result-file timestamp
    before and after the build. On Windows the build uses ninja (with the
    devkit toolchain injected via processDevkitRoot); elsewhere it uses gmake.
    """
    # Snapshot of the newest result timestamp before building; compared with
    # post_ts at the end to decide the return value.
    pre_ts = GraalNodeJsBuildTask._get_newest_ts(self.subject.getResults(), fatalIfMissing=False)

    build_env = os.environ.copy()
    # Prepend the suite-provided python2 shim so node's build tooling finds it first.
    _setEnvVar('PATH', '%s%s%s' % (join(_suite.mxDir, 'python2'), pathsep, build_env['PATH']), build_env)

    debug = ['--debug'] if self._debug_mode else []
    shared_library = ['--enable-shared-library'] if hasattr(self.args, 'sharedlibrary') and self.args.sharedlibrary else []

    newest_config_file_ts = GraalNodeJsBuildTask._get_newest_ts(_config_files, fatalIfMissing=True)
    newest_generated_config_file_ts = GraalNodeJsBuildTask._get_newest_ts(_generated_config_files, fatalIfMissing=False)
    # Lazily generate config files only if `configure` and `configure.py` are older than the files they generate.
    # If we don't do this, the `Makefile` always considers `config.gypi` out of date, triggering a second, unnecessary configure.
    lazy_generator = ['--lazy-generator'] if newest_generated_config_file_ts.isNewerThan(newest_config_file_ts) else []

    if _is_windows:
        # Sets up the MSVC devkit toolchain variables in build_env.
        processDevkitRoot(env=build_env)
        # NASM and NINJA are mx-managed libraries; add them to PATH.
        _setEnvVar('PATH', pathsep.join([build_env['PATH']] + [mx.library(lib_name).get_path(True) for lib_name in ('NASM', 'NINJA')]), build_env)
        extra_flags = ['--ninja', '--dest-cpu=x64', '--without-etw']
    else:
        extra_flags = []

    _mxrun(python_cmd() + [
        join(_suite.dir, 'configure'),
        '--partly-static', '--without-dtrace', '--without-inspector', '--without-node-snapshot', '--without-node-code-cache',
        '--java-home', _java_home(forBuild=True)
    ] + debug + shared_library + lazy_generator + extra_flags,
           cwd=_suite.dir, print_cmd=True, env=build_env)

    # On CI, suppress successful build output unless --verbose was given.
    quiet_build = mx.is_continuous_integration() and not mx.get_opts().verbose
    if _is_windows:
        verbose = ['-v'] if mx.get_opts().verbose else []
        # The custom env is not used to resolve the location of the executable
        _mxrun([join(mx.library('NINJA').get_path(True), 'ninja.exe')] + verbose + ['-j%d' % self.parallelism, '-C', self._build_dir], print_cmd=True, quiet_if_successful=quiet_build, env=build_env)
    else:
        verbose = 'V={}'.format('1' if mx.get_opts().verbose else '')
        _mxrun([mx.gmake_cmd(), '-j%d' % self.parallelism, verbose], cwd=_suite.dir, print_cmd=True, quiet_if_successful=quiet_build, env=build_env)

    # put headers for native modules into out/headers
    _setEnvVar('HEADERS_ONLY', '1', build_env)
    _mxrun(python_cmd() + [join('tools', 'install.py'), 'install', join('out', 'headers'), sep], quiet_if_successful=not mx.get_opts().verbose, env=build_env)

    post_ts = GraalNodeJsBuildTask._get_newest_ts(self.subject.getResults(), fatalIfMissing=True)
    mx.logv('Newest time-stamp before building: {}\nNewest time-stamp after building: {}\nHas built? {}'.format(pre_ts, post_ts, post_ts.isNewerThan(pre_ts)))
    built = post_ts.isNewerThan(pre_ts)
    if built and _current_os == 'darwin':
        # Let the freshly built binary locate the JVM libraries at run time.
        nodePath = join(self._build_dir, 'node')
        _mxrun(['install_name_tool', '-add_rpath', join(_java_home(), 'jre', 'lib'), '-add_rpath', join(_java_home(), 'lib'), nodePath], print_cmd=True, env=build_env)
    return built
def python_build_watch(args):
    """
    Watch the suite and on any changes to .class, .jar, .h, or .c files rebuild.
    By default, rebuilds only the archives and non-Java projects.

    Runs forever: each iteration blocks on inotifywait, waits for a quiet
    period (no further events for 3 seconds), then triggers the selected
    build flavor (--full, --graalvm, or the default archive/native build).
    Requires the `inotifywait` tool on PATH (Linux only).
    """
    parser = ArgumentParser(prog='mx python-build-watch')
    parser.add_argument('--full', action='store_true', help='Run a full mx build', required=False)
    parser.add_argument('--graalvm', action='store_true', help='Build a graalvm', required=False)
    parser.add_argument('--no-java', action='store_true', help='Build only archives and native projects [default]', required=False)
    args = parser.parse_args(args)
    if sum([args.full, args.graalvm, args.no_java]) > 1:
        mx.abort("Only one of --full, --graalvm, --no-java can be specified")
    # Each flavor ignores a different set of files; the commented `suffixes`
    # lists record which extensions each mode effectively reacts to.
    if args.full:
        # suffixes = [".c", ".h", ".class", ".jar", ".java"]
        excludes = [".*\\.py$"]
    elif args.graalvm:
        # suffixes = [".c", ".h", ".class", ".jar", ".java", ".py"]
        excludes = ["mx_.*\\.py$"]
    else:
        # suffixes = [".c", ".h", ".class", ".jar"]
        excludes = [".*\\.py$", ".*\\.java$"]

    # Recursive watch over the suite, excluding the .git directory.
    cmd = ["inotifywait", "-q", "-e", "close_write,moved_to", "-r", "--format=%f"]
    for e in excludes:
        cmd += ["--exclude", e]
    cmd += ["@%s" % os.path.join(SUITE.dir, ".git"), SUITE.dir]
    # cmd_qq is the same watch but fully quiet, used for the debounce loop.
    cmd_qq = cmd[:]
    cmd_qq[1] = "-qq"
    # Remember the global quiet flag so it can be restored after each debounce.
    was_quiet = mx.get_opts().quiet

    while True:
        out = mx.OutputCapture()
        if mx.run(cmd, out=out, nonZeroIsFatal=False) != 0:
            continue
        changed_file = out.data.strip()
        mx.logv(changed_file)
        if any(changed_file.endswith(ext) for ext in [".c", ".h", ".class", ".jar"]):
            if not mx.get_opts().quiet:
                sys.stdout.write("Build needed ")
                sys.stdout.flush()
            while True:
                # re-run this until it times out, which we'll interpret as quiet
                # time
                if not mx.get_opts().quiet:
                    sys.stdout.write(".")
                    sys.stdout.flush()
                # Temporarily silence mx while polling; restored in `finally`.
                mx.get_opts().quiet = True
                try:
                    retcode = mx.run(cmd_qq, timeout=3, nonZeroIsFatal=False)
                finally:
                    mx.get_opts().quiet = was_quiet
                if retcode == mx.ERROR_TIMEOUT:
                    if not mx.get_opts().quiet:
                        sys.stdout.write("\n")
                    break
            mx.log("Building.")
            if args.full:
                mx.command_function("build")()
            elif args.graalvm:
                mx.log(python_gvm())
            else:
                nativebuild([])
            mx.log("Build done.")
def get_jacoco_dest_file(): return JACOCO_EXEC or mx.get_opts().jacoco_dest_file