def cinterfacetutorial(native_image, args=None):
    """Build and run the tutorial for the C interface.

    :param native_image: callable that invokes the native-image builder with a list of arguments
    :param args: optional extra arguments forwarded to the native-image invocation
    """
    args = [] if args is None else args
    tutorial_proj = mx.dependency('com.oracle.svm.tutorial')
    cSourceDir = join(tutorial_proj.dir, 'native')
    buildDir = join(svmbuild_dir(), tutorial_proj.name, 'build')

    # clean / create output directory
    if exists(buildDir):
        remove_tree(buildDir)
    mkpath(buildDir)

    # Build the shared library from Java code
    native_image(['--shared', '-H:Path=' + buildDir, '-H:Name=libcinterfacetutorial',
                  '-H:CLibraryPath=' + tutorial_proj.dir,
                  '-cp', tutorial_proj.output_dir()] + args)

    # Build the C executable against the generated library; -rpath so the
    # shared library is found at run time without LD_LIBRARY_PATH
    mx.run(['cc', '-g', join(cSourceDir, 'cinterfacetutorial.c'),
            '-I' + buildDir,
            '-L' + buildDir, '-lcinterfacetutorial',
            '-ldl', '-Wl,-rpath,' + buildDir,
            '-o', join(buildDir, 'cinterfacetutorial')])

    # Start the C executable
    mx.run([buildDir + '/cinterfacetutorial'])
def genInlineAssemblyParser(args=None, out=None):
    """generate inline assembly parser and scanner if corresponding grammer is new"""
    # Directory and files produced by the Coco/R generator
    generatedParserDir = _inlineAssemblySrcDir + _inlineAssemblyPackageName.replace(".", "/") + "/"
    generatedFiles = [generatedParserDir + "Parser.java", generatedParserDir + "Scanner.java"]
    # Grammar (.atg) and frame files that drive the generation
    configFiles = [
        _inlineAssemblySrcDir + "InlineAssembly.atg",
        _inlineAssemblySrcDir + "Parser.frame",
        _inlineAssemblySrcDir + "Scanner.frame",
        _inlineAssemblySrcDir + "Copyright.frame",
    ]
    isAllGeneratedFilesExists = all([isfile(fileName) for fileName in generatedFiles])
    # Newest generated file; empty string when nothing has been generated yet
    # (safe: only dereferenced below when isAllGeneratedFilesExists is true)
    latestGeneratedFile = (
        sorted(generatedFiles, key=os.path.getmtime, reverse=True)[0] if isAllGeneratedFilesExists else ""
    )
    latestConfigFile = sorted(configFiles, key=os.path.getmtime, reverse=True)[0]
    # If any auto-generated file is missing or any config file is updated after last auto-generation then regenerate the files
    if not isAllGeneratedFilesExists or os.path.getmtime(latestConfigFile) >= os.path.getmtime(latestGeneratedFile):
        # Fetch the Coco/R generator jar on first use
        localCocoJarFile = _suite.dir + "/lib/Coco.jar"
        if not isfile(localCocoJarFile):
            jarFileUrls = ["https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/Coco.jar"]
            mx.download(localCocoJarFile, jarFileUrls)

        command = [
            mx.get_jdk(tag="jvmci").java,
            "-jar",
            localCocoJarFile,
            "-package",
            _inlineAssemblyPackageName,
            "-o",
            generatedParserDir,
            _inlineAssemblyGrammer,
        ]
        mx.run(command)
        # Files get generated in Windows file format. Convert them to avoid style check failure during regression testing
        dos2unix(generatedFiles)
def _update_JDK9_STUBS_library():
    """
    Sets the "path" and "sha1" attributes of the "JDK9_STUBS" library.

    Compiles minimal stub sources for JDK9-internal API into a jar (once,
    cached under the suite output root) so code referencing that API can be
    compiled on a JDK8 boot JDK.
    """
    jdk9InternalLib = _suite.suiteDict['libraries']['JDK9_STUBS']
    jarInputDir = join(_suite.get_output_root(), 'jdk9-stubs')
    jarPath = join(_suite.get_output_root(), 'jdk9-stubs.jar')

    # (package, class name, Java source) triples for each stub to generate
    stubs = [
        ('jdk.internal.misc', 'VM', """package jdk.internal.misc; public class VM { public static String getSavedProperty(String key) { throw new InternalError("should not reach here"); } } """)
    ]

    if not exists(jarPath):
        sourceFiles = []
        for (package, className, source) in stubs:
            sourceFile = join(jarInputDir, package.replace('.', os.sep), className + '.java')
            mx.ensure_dir_exists(os.path.dirname(sourceFile))
            with open(sourceFile, 'w') as fp:
                fp.write(source)
            sourceFiles.append(sourceFile)
        jdk = mx.get_jdk(tag='default')
        # Compile the stubs then jar the resulting class files
        mx.run([jdk.javac, '-d', jarInputDir] + sourceFiles)
        mx.run([jdk.jar, 'cf', jarPath, '.'], cwd=jarInputDir)

    jdk9InternalLib['path'] = jarPath
    jdk9InternalLib['sha1'] = mx.sha1OfFile(jarPath)
def helloworld_internal(native_image, path=svmbuild_dir(), javac_command=None, args=None):
    """Generate, compile and image-build a trivial HelloWorld program, then
    execute the image and verify that it prints the expected greeting."""
    args = [] if args is None else args
    if javac_command is None:
        javac_command = ['javac']
    mkpath(path)
    greeting = 'Hello from Substrate VM'
    source_path = join(path, 'HelloWorld.java')
    with open(source_path, 'w') as src:
        src.write('public class HelloWorld { public static void main(String[] args) { System.out.println("' + greeting + '"); } }')

    # Run javac. We sometimes run with an image so annotation processing must be disabled because it requires dynamic
    # class loading, and we need to set the bootclasspath manually because our build directory does not contain any
    # .jar files.
    mx.run(javac_command + [source_path])
    native_image(["-H:Path=" + path, '-cp', path, 'HelloWorld'] + args)

    captured = []

    def _capture(line):
        captured.append(line)
        mx.log(line)

    mx.run([join(path, 'helloworld')], out=_capture)

    expected = [greeting + '\n']
    if captured != expected:
        raise Exception('Wrong output: ' + str(captured) + " != " + str(expected))
def _gnur_install_test(pkgs, gnur_libinstall, gnur_install_tmp):
    """Install the given R packages with GNU R and run their tests.

    :param pkgs: iterable of package names, written one-per-line to a filelist
    :param gnur_libinstall: directory used as R_LIBS_USER (install target)
    :param gnur_install_tmp: directory used as TMPDIR during installation
    """
    gnur_packages = join(_fastr_suite_dir(), 'gnur.packages')
    # One package name per line, consumed via --pkg-filelist below
    with open(gnur_packages, 'w') as f:
        for pkg in pkgs:
            f.write(pkg)
            f.write('\n')
    env = os.environ.copy()
    env["TMPDIR"] = gnur_install_tmp
    env['R_LIBS_USER'] = gnur_libinstall
    env["TZDIR"] = "/usr/share/zoneinfo/"

    args = []
    if _graalvm():
        args += [_gnur_rscript()]
    args += [_installpkgs_script()]
    args += ['--pkg-filelist', gnur_packages]
    args += ['--run-tests']
    # GNU R will abort the entire run otherwise if a failure occurs
    # args += ['--run-mode', 'internal']
    args += ['--ignore-blacklist']
    args += ['--testdir', 'test.gnur']
    _log_step('BEGIN', 'install/test', 'GnuR')
    if _graalvm():
        mx.run(args, nonZeroIsFatal=False, env=env)
    else:
        mx_fastr.gnu_rscript(args, env=env)
    _log_step('END', 'install/test', 'GnuR')
def _run_netbeans_app(app_name, env=None, args=None):
    """Extract (if necessary) and launch a NetBeans-based application.

    :param app_name: application name; '<APP_NAME>_DIST' must be a known mx library
    :param env: optional environment dict for the launched process
    :param args: optional extra command-line arguments for the application
    """
    args = [] if args is None else args
    dist = app_name.upper() + '_DIST'
    name = app_name.lower()
    extractPath = join(_suite.get_output_root())
    if mx.get_os() == 'windows':
        executable = join(extractPath, name, 'bin', name + '.exe')
    else:
        executable = join(extractPath, name, 'bin', name)

    # Check whether the current installation is up-to-date: an executable with
    # no resolved distribution archive means the archive changed upstream
    if exists(executable) and not exists(mx.library(dist).get_path(resolve=False)):
        mx.log('Updating ' + app_name)
        shutil.rmtree(join(extractPath, name))

    archive = mx.library(dist).get_path(resolve=True)

    if not exists(executable):
        zf = zipfile.ZipFile(archive, 'r')
        zf.extractall(extractPath)

    if not exists(executable):
        mx.abort(app_name + ' binary does not exist: ' + executable)

    if mx.get_os() != 'windows':
        # Make sure that execution is allowed. The zip file does not always specify that correctly
        os.chmod(executable, 0777)

    mx.run([executable] + args, env=env)
def pullInstallDragonEgg(args=None):
    """downloads and installs dragonegg (assumes that compatible GCC and G++ versions are installed)"""
    toolDir = join(_toolDir, "tools/dragonegg")
    mx.ensure_dir_exists(toolDir)
    url = "https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/dragonegg-3.2.src.tar.gz"
    localPath = pullsuite(toolDir, [url])
    tar(localPath, toolDir)
    os.remove(localPath)
    if mx.get_os() == "darwin":
        # On macOS also pull a matching GCC and patch the dragonegg Makefile
        gccToolDir = join(_toolDir, "tools/gcc")
        url = "https://lafo.ssw.uni-linz.ac.at/pub/sulong-deps/gcc-4.6.4.tar.gz"
        localPath = pullsuite(gccToolDir, [url])
        tar(localPath, gccToolDir)
        os.remove(localPath)
        # NOTE(review): string concatenation assumes _toolDir ends with a path separator -- confirm
        mx.run(
            ["patch", "-p1", _toolDir + "tools/dragonegg/dragonegg-3.2.src/Makefile", "mx.sulong/dragonegg-mac.patch"]
        )
    # dragonegg's Makefile reads the toolchain from these environment variables
    os.environ["GCC"] = getGCC()
    os.environ["CXX"] = getGPP()
    os.environ["CC"] = getGCC()
    pullLLVMBinaries()
    os.environ["LLVM_CONFIG"] = findLLVMProgram("llvm-config")
    print os.environ["LLVM_CONFIG"]
    compileCommand = ["make"]
    return mx.run(compileCommand, cwd=_toolDir + "tools/dragonegg/dragonegg-3.2.src")
def _runmake(args):
    """run the JDK make process

To build hotspot and import it into the JDK: "mx make hotspot import-hotspot"
{0}"""
    jdkBuildDir = _get_jdk_build_dir()
    if not exists(jdkBuildDir):
        # JDK9 must be bootstrapped with a JDK8
        compliance = mx.JavaCompliance('8')
        jdk8 = mx.get_jdk(compliance.exactMatch, versionDescription=compliance.value)
        # Fix: '--with-jvm-features=graal' was previously passed twice; pass it once.
        cmd = ['sh', 'configure', '--with-debug-level=' + _vm.debugLevel,
               '--with-native-debug-symbols=external',
               '--disable-precompiled-headers',
               '--with-jvm-features=graal',
               '--with-jvm-variants=' + _vm.jvmVariant,
               '--disable-warnings-as-errors',
               '--with-boot-jdk=' + jdk8.home]
        mx.run(cmd, cwd=_jdkSourceRoot)
    cmd = [mx.gmake_cmd(), 'CONF=' + _vm.debugLevel]
    if mx.get_opts().verbose:
        cmd.append('LOG=debug')
    cmd.extend(args)

    # Building the images is required when running from a JDK image
    if mx.get_opts().use_jdk_image and 'images' not in args:
        cmd.append('images')

    if not mx.get_opts().verbose:
        mx.log('--------------- make execution ----------------------')
        mx.log('Working directory: ' + _jdkSourceRoot)
        mx.log('Command line: ' + ' '.join(cmd))
        mx.log('-----------------------------------------------------')

    mx.run(cmd, cwd=_jdkSourceRoot)
def c1visualizer(args): """run the Cl Compiler Visualizer""" libpath = join(_suite.dir, 'lib') if mx.get_os() == 'windows': executable = join(libpath, 'c1visualizer', 'bin', 'c1visualizer.exe') else: executable = join(libpath, 'c1visualizer', 'bin', 'c1visualizer') # Check whether the current C1Visualizer installation is the up-to-date if exists(executable) and not exists(mx.library('C1VISUALIZER_DIST').get_path(resolve=False)): mx.log('Updating C1Visualizer') shutil.rmtree(join(libpath, 'c1visualizer')) archive = mx.library('C1VISUALIZER_DIST').get_path(resolve=True) if not exists(executable): zf = zipfile.ZipFile(archive, 'r') zf.extractall(libpath) if not exists(executable): mx.abort('C1Visualizer binary does not exist: ' + executable) if mx.get_os() != 'windows': # Make sure that execution is allowed. The zip file does not always specfiy that correctly os.chmod(executable, 0777) mx.run([executable])
def jdkartifactstats(args):
    """show stats about JDK deployed Graal artifacts"""
    artifacts = {}
    jdkDir = get_jvmci_jdk().home

    def _getDeployedJars():
        # Yield every deployed, non-stripped jar; layout differs before/after JDK9
        if JVMCI_VERSION < 9:
            for root, _, filenames in os.walk(join(jdkDir, 'jre', 'lib')):
                for f in filenames:
                    if f.endswith('.jar') and not f.endswith('.stripped.jar'):
                        yield join(root, f)
        else:
            for jdkDist in jdkDeployedDists:
                dist = jdkDist.dist()
                if isinstance(jdkDist, JvmciJDKDeployedDist):
                    yield dist.path

    # Bucket the jars into categories by file name
    for jar in _getDeployedJars():
        f = basename(jar)
        if 'truffle' in f:
            if 'enterprise' in f:
                artifacts.setdefault('GraalEnterpriseTruffle', []).append(jar)
            else:
                artifacts.setdefault('GraalTruffle', []).append(jar)
        elif 'enterprise' in f:
            artifacts.setdefault('GraalEnterprise', []).append(jar)
        elif 'jvmci' in f:
            artifacts.setdefault('JVMCI', []).append(jar)
        elif 'graal' in f:
            artifacts.setdefault('Graal', []).append(jar)
        else:
            mx.logv('ignored: ' + jar)

    print '{:>10} {:>10} {:>10} {}'.format('All', 'NoVars', 'None', 'Jar')
    for category in sorted(artifacts.viewkeys()):
        jars = artifacts[category]
        if jars:
            totals = (0, 0, 0)
            print
            for j in jars:
                gSize = os.path.getsize(j)
                stripped = j[:-len('.jar')] + '.stripped.jar'
                # Repack without local-variable debug tables to measure their share
                mx.run([mx.get_jdk().pack200, '--repack', '--quiet', '-J-Djava.util.logging.config.file=', '-DLocalVariableTypeTable=strip', '-DLocalVariableTable=strip', stripped, j])
                gLinesSourceSize = os.path.getsize(stripped)
                # Repack stripping all debug attributes (-G)
                mx.run([mx.get_jdk().pack200, '--repack', '--quiet', '-J-Djava.util.logging.config.file=', '-G', stripped, j])
                gNoneSize = os.path.getsize(stripped)
                os.remove(stripped)
                print '{:10,} {:10,} {:10,} {}:{}'.format(gSize, gLinesSourceSize, gNoneSize, category, basename(j))
                t1, t2, t3 = totals
                totals = (t1 + gSize, t2 + gLinesSourceSize, t3 + gNoneSize)
            t1, t2, t3 = totals
            print '{:10,} {:10,} {:10,} {}'.format(t1, t2, t3, category)

    jvmLib = join(jdkDir, relativeVmLibDirInJdk(), get_vm(), mx.add_lib_suffix(mx.add_lib_prefix('jvm')))
    print
    if exists(jvmLib):
        print '{:10,} {}'.format(os.path.getsize(jvmLib), jvmLib)
    else:
        print '{:>10} {}'.format('<missing>', jvmLib)
def build(self):
    """Build jruby-core via Maven, then install the Bundler gem into the
    shared gem home used by the build."""
    workdir = _suite.dir
    maven_repo_arg, env = mavenSetup()
    mx.log("Building jruby-core with Maven")
    mx.run_maven(['-q', '-DskipTests', maven_repo_arg, '-pl', 'core,lib'], cwd=workdir, env=env)
    # Install Bundler
    gems_dir = join(_suite.dir, 'lib', 'ruby', 'gems', 'shared')
    env['GEM_HOME'] = gems_dir
    env['GEM_PATH'] = gems_dir
    mx.run(['bin/jruby', 'bin/gem', 'install', 'bundler', '-v', '1.10.6'], cwd=workdir, env=env)
def build(self):
    """Build jruby-core (with sources jar) via Maven, honouring mx verbosity,
    then install the Bundler gem into the shared gem home."""
    workdir = _suite.dir
    maven_repo_arg, env = mavenSetup()
    mx.log("Building jruby-core with Maven")
    if mx.get_opts().verbose:
        quiet = []
    else:
        quiet = ['-q']
    mx.run_maven(quiet + ['-DskipTests', maven_repo_arg, '-Dcreate.sources.jar', '-pl', 'core,lib'], cwd=workdir, env=env)
    # Install Bundler
    gems_dir = join(_suite.dir, 'lib', 'ruby', 'gems', 'shared')
    env['GEM_HOME'] = gems_dir
    env['GEM_PATH'] = gems_dir
    mx.run(['bin/jruby', 'bin/gem', 'install', 'bundler', '-v', '1.10.6'], cwd=workdir, env=env)
def clangBench(args=None):
    """ Executes a benchmark with the system default Clang"""
    # Fix: the declared default args=None crashed on inputFiles[0] / list
    # concatenation; normalize to a list like the other commands in this file.
    args = [] if args is None else args
    _, inputFiles = extract_compiler_args(args)
    # Dispatch on the extension of the first input file
    _, ext = os.path.splitext(inputFiles[0])
    if ext == '.c':
        mx.run(['clang'] + args + standardLinkerCommands())
    elif ext == '.cpp':
        mx.run(['clang++'] + args + standardLinkerCommands())
    else:
        exit(ext + " is not supported!")
    # Run the produced executable and return its exit status
    return mx.run(['./a.out'])
def _configs():
    """Run MaxineTesterConfiguration and collect its 'key#value' output lines
    into a dict mapping key -> value (trailing whitespace stripped)."""
    configs = dict()

    def _absorb(line):
        # Each output line has the form '<key>#<value>'
        (key, val) = line.split('#')
        configs[key] = val.rstrip()

    mx.run([mx.java().java, '-client', '-Xmx40m', '-Xms40m', '-XX:NewSize=30m',
            '-cp', mx.classpath(resolve=False),
            'test.com.sun.max.vm.MaxineTesterConfiguration'], out=_absorb)
    return configs
def gccBench(args=None):
    """ executes a benchmark with the system default GCC version"""
    # Fix: the declared default args=None crashed on inputFiles[0] / list
    # concatenation; normalize to a list like the other commands in this file.
    args = [] if args is None else args
    _, inputFiles = extract_compiler_args(args)
    # Dispatch on the extension of the first input file
    _, ext = os.path.splitext(inputFiles[0])
    if ext == '.c':
        mx.run(['gcc', '-std=gnu99'] + args + standardLinkerCommands())
    elif ext == '.cpp':
        mx.run(['g++'] + args + standardLinkerCommands())
    else:
        exit(ext + " is not supported!")
    # Run the produced executable and return its exit status
    return mx.run(['./a.out'])
def mavenSetup():
    """Prepare a Maven invocation.

    Returns a (maven_repo_arg, env) pair: the -Dmaven.repo.local argument
    pointing at a suite-local repository, and an environment copy with
    JAVA_HOME/bin prepended to PATH so Maven picks up the right java.
    """
    mavenDir = join(_suite.dir, 'mxbuild', 'mvn')
    maven_repo_arg = '-Dmaven.repo.local=' + mavenDir
    env = os.environ.copy()
    env['JRUBY_BUILD_MORE_QUIET'] = 'true'
    # HACK: since the maven executable plugin does not configure the
    # java executable that is used we unfortunately need to prepend it to the PATH
    javaHome = os.getenv('JAVA_HOME')
    if javaHome:
        env["PATH"] = javaHome + '/bin' + os.pathsep + env["PATH"]
        # Fix: log the modified env PATH, not the unmodified os.environ one
        mx.logv('Setting PATH to {}'.format(env["PATH"]))
    # Fix: verify the java that Maven will actually see by passing env
    mx.run(['java', '-version'], env=env)
    return maven_repo_arg, env
def inspectoragent(args):
    """launch the Inspector agent

Launch the Inspector agent. The agent listens on a given port for an
incoming connection from a remote Inspector process."""
    cmd = mx.java().format_cmd(['-cp', mx.classpath(), 'com.sun.max.tele.channel.agent.InspectorAgent'] + args)
    if mx.get_os() != 'darwin':
        mx.run(cmd)
    else:
        # The -E option propagates the environment variables into the sudo process
        mx.run(['sudo', '-E', '-p',
                'Debugging is a privileged operation on Mac OS X.\nPlease enter your "sudo" password:'] + cmd)
def pullInstallDragonEgg(args=None):
    """downloads and installs dragonegg (assumes that GCC 4.6 is on the path)"""
    if hasDragoneggGCCInstalled():
        toolDir = join(_toolDir, "tools/dragonegg")
        mx.ensure_dir_exists(toolDir)
        url = 'http://llvm.org/releases/3.2/dragonegg-3.2.src.tar.gz'
        localPath = pullsuite(toolDir, [url])
        tar(localPath, toolDir)
        os.remove(localPath)
        # dragonegg's Makefile reads the toolchain from these environment variables
        os.environ['GCC'] = 'gcc-4.6'
        # NOTE(review): concatenation assumes _toolDir ends with a path separator -- confirm
        os.environ['LLVM_CONFIG'] = _toolDir + 'tools/llvm/bin/llvm-config'
        compileCommand = ['make']
        mx.run(compileCommand, cwd=_toolDir + 'tools/dragonegg/dragonegg-3.2.src')
    else:
        print 'could not find gcc-4.6, skip installing dragonegg!'
def mavenSetup():
    """Prepare a Maven invocation.

    Returns a (maven_repo_arg, env) pair: the -Dmaven.repo.local argument
    (preferring a checked-in build pack repository when present) and an
    environment copy with JAVA_HOME/bin prepended to PATH so Maven picks up
    the right java.
    """
    buildPack = join(_suite.dir, 'jruby-build-pack/maven')
    # Prefer the offline build-pack repository when it exists
    mavenDir = buildPack if isdir(buildPack) else join(_suite.dir, 'mxbuild/mvn')
    maven_repo_arg = '-Dmaven.repo.local=' + mavenDir
    env = os.environ.copy()
    if not mx.get_opts().verbose:
        env['JRUBY_BUILD_MORE_QUIET'] = 'true'
    # HACK: since the maven executable plugin does not configure the
    # java executable that is used we unfortunately need to prepend it to the PATH
    javaHome = os.getenv('JAVA_HOME')
    if javaHome:
        env["PATH"] = javaHome + '/bin' + os.pathsep + env["PATH"]
        # Fix: log the modified env PATH, not the unmodified os.environ one
        mx.logv('Setting PATH to {}'.format(env["PATH"]))
    mx.run(['java', '-version'], env=env)
    return maven_repo_arg, env
def run_java(jdk, args, nonZeroIsFatal=True, out=None, err=None, cwd=None, timeout=None, env=None, addDefaultArgs=True):
    """Run the given JVMCI JDK's java with the configured VM variant and
    JVMCI mode arguments prepended.

    Fix: 'timeout' and 'env' were accepted but silently dropped; they are
    now forwarded to mx.run.
    """
    args = _parseVmArgs(jdk, args, addDefaultArgs=addDefaultArgs)

    jvmciModeArgs = _jvmciModes[_vm.jvmciMode]
    cmd = [jdk.java] + ['-' + get_vm()] + jvmciModeArgs + args
    return mx.run(cmd, nonZeroIsFatal=nonZeroIsFatal, out=out, err=err, cwd=cwd, timeout=timeout, env=env)
def js_image_test(binary, bench_location, name, warmup_iterations, iterations, timeout=None, bin_args=None):
    """Run one JS benchmark through the harness with the given image binary and
    abort unless its stdout contains at least one '<name>: <number>' result line.

    :param binary: path to the JS image executable
    :param bench_location: directory containing harness.js and the benchmark
    :param name: benchmark name ('<name>.js' must exist in bench_location)
    :param timeout: optional timeout in seconds passed to mx.run
    :param bin_args: optional extra arguments placed before the harness script
    """
    bin_args = bin_args if bin_args is not None else []
    jsruncmd = [binary] + bin_args + [join(bench_location, 'harness.js'), '--', join(bench_location, name + '.js'),
                '--', '--warmup-iterations=' + str(warmup_iterations),
                '--iterations=' + str(iterations)]
    mx.log(' '.join(jsruncmd))

    passing = []

    # Capture stdout/stderr while still echoing them to the mx log
    stdoutdata = []
    def stdout_collector(x):
        stdoutdata.append(x)
        mx.log(x.rstrip())

    stderrdata = []
    def stderr_collector(x):
        stderrdata.append(x)
        mx.warn(x.rstrip())

    returncode = mx.run(jsruncmd, cwd=bench_location, out=stdout_collector, err=stderr_collector, nonZeroIsFatal=False, timeout=timeout)

    if returncode == mx.ERROR_TIMEOUT:
        print('INFO: TIMEOUT (> %d): %s' % (timeout, name))
    elif returncode >= 0:
        # Count result lines of the form '<label>: <number>'
        matches = 0
        for line in stdoutdata:
            if re.match(r'^\S+: *\d+(\.\d+)?\s*$', line):
                matches += 1
        if matches > 0:
            passing = stdoutdata

    if not passing:
        mx.abort('JS benchmark ' + name + ' failed')
def igv(args):
    """run the Ideal Graph Visualizer"""
    logFile = '.ideal_graph_visualizer.log'
    with open(join(_suite.dir, logFile), 'w') as fp:
        mx.logv('[Ideal Graph Visualizer log is in ' + fp.name + ']')
        nbplatform = join(_suite.dir, 'src', 'share', 'tools', 'IdealGraphVisualizer', 'nbplatform')

        # Remove NetBeans platform if it is earlier than the current supported version
        if exists(nbplatform):
            updateTrackingFile = join(nbplatform, 'platform', 'update_tracking', 'org-netbeans-core.xml')
            if not exists(updateTrackingFile):
                # Unknown version -> force a re-download
                mx.log('Could not find \'' + updateTrackingFile + '\', removing NetBeans platform')
                shutil.rmtree(nbplatform)
            else:
                dom = xml.dom.minidom.parse(updateTrackingFile)
                currentVersion = mx.VersionSpec(dom.getElementsByTagName('module_version')[0].getAttribute('specification_version'))
                supportedVersion = mx.VersionSpec('3.43.1')
                if currentVersion < supportedVersion:
                    mx.log('Replacing NetBeans platform version ' + str(currentVersion) + ' with version ' + str(supportedVersion))
                    shutil.rmtree(nbplatform)
                elif supportedVersion < currentVersion:
                    mx.log('Supported NetBeans version in igv command should be updated to ' + str(currentVersion))

        if not exists(nbplatform):
            mx.logv('[This execution may take a while as the NetBeans platform needs to be downloaded]')

        env = _igvBuildEnv()
        # make the jar for Batik 1.7 available.
        env['IGV_BATIK_JAR'] = mx.library('BATIK').get_path(True)
        # The ant 'run' target both builds and launches IGV; a non-zero exit is fatal
        if mx.run(['ant', '-f', mx._cygpathU2W(join(_suite.dir, 'src', 'share', 'tools', 'IdealGraphVisualizer', 'build.xml')), '-l', mx._cygpathU2W(fp.name), 'run'], env=env, nonZeroIsFatal=False):
            mx.abort("IGV ant build & launch failed. Check '" + logFile + "'. You can also try to delete 'src/share/tools/IdealGraphVisualizer/nbplatform'.")
def cc(args):
    """C compiler wrapper: compile with GCC/Clang and, when Sulong is available,
    additionally emit, optimize (mem2reg) and embed LLVM IR into the result.
    Returns the exit code of the last step that ran."""
    _log('fastr:cc', args)
    compiler = None
    sulong = _sulong()
    if sulong:
        analyzed_args = _analyze_args(args)
        if _is_linux():
            rc = sulong.compileWithGCC(analyzed_args.compile_args)
            if rc == 0 and analyzed_args.llvm_ir_file:
                if not analyzed_args.is_link:
                    # second pass to emit the LLVM IR file
                    rc = sulong.compileWithGCC(analyzed_args.emit_llvm_args)
        elif _is_darwin():
            rc = sulong.compileWithClang(analyzed_args.compile_args)
            if rc == 0 and analyzed_args.llvm_ir_file:
                if not analyzed_args.is_link:
                    rc = sulong.compileWithClang(analyzed_args.emit_llvm_args)
        else:
            mx.abort('unsupported platform')
        if rc == 0 and not analyzed_args.is_link and analyzed_args.llvm_ir_file:
            # run the mem2reg optimization on the emitted IR, then embed it
            rc = _mem2reg_opt(analyzed_args.llvm_ir_file)
            if rc == 0:
                rc = _embed_ir(analyzed_args.llvm_ir_file)
    else:
        # No Sulong: fall back to the platform's plain system compiler
        if _is_linux():
            compiler = 'gcc'
        elif _is_darwin():
            compiler = 'clang'
        else:
            mx.abort('unsupported platform')

        rc = mx.run([compiler] + args, nonZeroIsFatal=False)

    return rc
def gnu_rscript(args, env=None):
    '''
    run the internally built GNU Rscript executable
    env arg is used by pkgtest
    '''
    rscript = join(_gnur_path(), 'Rscript')
    return mx.run([rscript] + args, env=env, nonZeroIsFatal=False)
def run(self, cwd, args):
    """Build one shootout benchmark to LLVM bitcode (via wllvm + extract-bc),
    optimize it, then run it on the host VM. args[0] is the benchmark directory.
    Returns the host VM result."""
    # save current Directory
    self.currentDir = os.getcwd()
    os.chdir(_benchmarksDirectory())
    # NOTE(review): this devnull handle is never closed and the chdir calls are
    # not restored if anything below raises -- consider try/finally
    f = open(os.devnull, 'w')
    mx_sulong.ensureLLVMBinariesExist()
    benchmarkDir = args[0]
    # enter benchmark dir
    os.chdir(benchmarkDir)
    # create directory for executable of this vm
    if not os.path.exists(self.name()):
        os.makedirs(self.name())
    os.chdir(self.name())
    if os.path.exists('bench'):
        os.remove('bench')
    # Build with wllvm so the bitcode can later be extracted from the binary
    env = os.environ.copy()
    env['CFLAGS'] = ' '.join(_env_flags + ['-lm', '-lgmp'])
    env['LLVM_COMPILER'] = 'clang'
    env['CC'] = 'wllvm'
    env['VPATH'] = '..'
    cmdline = ['make', '-f', '../Makefile']
    mx.run(cmdline, out=f, err=f, env=env)
    mx.run(['extract-bc', 'bench'], out=f, err=f)
    mx_sulong.opt(['-o', 'bench.bc', 'bench.bc'] + ['-mem2reg', '-globalopt', '-simplifycfg', '-constprop', '-instcombine', '-dse', '-loop-simplify', '-reassociate', '-licm', '-gvn'], out=f, err=f)
    suTruffleOptions = [
        '-Dgraal.TruffleBackgroundCompilation=false',
        '-Dgraal.TruffleTimeThreshold=1000000',
        '-Dgraal.TruffleInliningMaxCallerSize=10000',
        '-Dgraal.TruffleCompilationExceptionsAreFatal=true',
        mx_subst.path_substitutions.substitute('-Dpolyglot.llvm.libraryPath=<path:SULONG_LIBS>'),
        '-Dpolyglot.llvm.libraries=libgmp.so.10']
    sulongCmdLine = suTruffleOptions + mx_sulong.getClasspathOptions() + ['-XX:-UseJVMCIClassLoader', "com.oracle.truffle.llvm.launcher.LLVMLauncher"] + ['bench.bc']
    result = self.host_vm().run(cwd, sulongCmdLine + args)
    # reset current Directory
    os.chdir(self.currentDir)
    return result
def runTool(self, args, errorMsg=None):
    """Run an external tool, suppressing its output unless mx is verbose.

    Returns the tool's exit code, or -1 if the run raised SystemExit.
    Fix: the devnull handle is now closed via try/finally (previously it
    leaked when mx.run raised, and 'f' could be referenced unbound).
    """
    f = None
    try:
        try:
            if not mx.get_opts().verbose:
                f = open(os.devnull, 'w')
                ret = mx.run(args, out=f, err=f)
            else:
                ret = mx.run(args)
        except SystemExit:
            ret = -1
            if errorMsg is None:
                print('\nError: Cannot run %s' % args)
            else:
                print('\nError: %s\n%s' % (errorMsg, ' '.join(args)))
    finally:
        if f is not None:
            f.close()
    return ret
def _jvmci_gate_runner(args, tasks):
    """Gate tasks for JVMCI: build the release server VM, run the JVMCI unit
    tests in hosted mode, build the remaining VM flavors and rebuild IGV."""
    # Build release server VM now so we can run the unit tests
    with Task('BuildHotSpotJVMCIHosted: release', tasks) as t:
        if t: _runmultimake(['--jdk-jvm-variants', 'server', '--jdk-debug-levels', 'release'])

    # Run unit tests in hosted mode
    with VM(jvmVariant='server', debugLevel='release', jvmciMode='hosted'):
        with Task('JVMCI UnitTests: hosted-release', tasks) as t:
            if t: unittest(['--suite', 'jvmci', '--enable-timing', '--verbose', '--fail-fast'])

    # Build the other VM flavors
    with Task('BuildHotSpotJVMCIOthers: fastdebug', tasks) as t:
        if t: _runmultimake(['--jdk-jvm-variants', 'server', '--jdk-debug-levels', 'fastdebug'])

    with Task('CleanAndBuildIdealGraphVisualizer', tasks, disableJacoco=True) as t:
        # IGV is not built on SPARC machines
        if t and platform.processor() != 'sparc':
            buildxml = mx._cygpathU2W(join(_suite.dir, 'src', 'share', 'tools', 'IdealGraphVisualizer', 'build.xml'))
            mx.run(['ant', '-f', buildxml, '-q', 'clean', 'build'], env=_igvBuildEnv())
def run_java(self, args, vm=None, nonZeroIsFatal=True, out=None, err=None, cwd=None, timeout=None, env=None, addDefaultArgs=True):
    """Run this JDK's java with the given VM variant (default 'server') and
    the configured JVMCI mode arguments prepended.

    Fix: 'timeout' and 'env' were accepted but silently dropped; they are
    now forwarded to mx.run.
    """
    if vm is None:
        vm = 'server'

    args = self.parseVmArgs(args, addDefaultArgs=addDefaultArgs)

    jvmciModeArgs = _jvmciModes[_vm.jvmciMode]
    cmd = [self.java] + ['-' + vm] + jvmciModeArgs + args
    return mx.run(cmd, nonZeroIsFatal=nonZeroIsFatal, out=out, err=err, cwd=cwd, timeout=timeout, env=env)
def _runmake(args):
    """run the JDK make process

To build hotspot and import it into the JDK: "mx make hotspot import-hotspot"
{0}"""
    jdkBuildDir = _get_jdk_build_dir()
    if not exists(jdkBuildDir):
        # JDK9 must be bootstrapped with a JDK8
        compliance = mx.JavaCompliance('8')
        jdk8 = mx.get_jdk(compliance.exactMatch, versionDescription=compliance.value)
        cmd = ['sh', 'configure', '--with-debug-level=' + _vm.debugLevel,
               '--with-native-debug-symbols=external',
               '--disable-precompiled-headers',
               '--with-jvm-variants=' + _vm.jvmVariant,
               '--disable-warnings-as-errors',
               '--with-boot-jdk=' + jdk8.home]
        mx.run(cmd, cwd=_jdkSourceRoot)
    cmd = [mx.gmake_cmd(), 'CONF=' + _vm.debugLevel]
    if mx.get_opts().verbose:
        cmd.append('LOG=debug')
    cmd.extend(args)

    # Building images is required when running from a JDK image
    if mx.get_opts().use_jdk_image and 'images' not in args:
        cmd.append('images')

    if not mx.get_opts().verbose:
        mx.log('--------------- make execution ----------------------')
        mx.log('Working directory: ' + _jdkSourceRoot)
        mx.log('Command line: ' + ' '.join(cmd))
        mx.log('-----------------------------------------------------')

    mx.run(cmd, cwd=_jdkSourceRoot)

    if 'images' in cmd:
        jdkImageDir = join(jdkBuildDir, 'images', 'jdk')

        # The OpenJDK build creates an empty cacerts file so copy one from
        # the default JDK (which is assumed to be an OracleJDK)
        srcCerts = join(mx.get_jdk(tag='default').home, 'lib', 'security', 'cacerts')
        if not exists(srcCerts):
            # Might be building with JDK8 which has cacerts under jre/
            srcCerts = join(mx.get_jdk(tag='default').home, 'jre', 'lib', 'security', 'cacerts')
        dstCerts = join(jdkImageDir, 'lib', 'security', 'cacerts')
        if srcCerts != dstCerts:
            shutil.copyfile(srcCerts, dstCerts)

        _create_jdk_bundle(jdkBuildDir, _vm.debugLevel, jdkImageDir)
def fc(args):
    """Fortran compiler wrapper: compile with gfortran and, when Sulong is
    available, additionally produce LLVM IR via dragonegg, assemble it to
    bitcode and embed it. Returns the exit code of the last step that ran."""
    _log('fastr:fc', args)
    compiler = None
    sulong = _sulong()
    if sulong:
        analyzed_args = _analyze_args(args, dragonEgg=True)
        rc = mx.run([sulong.getGFortran()] + analyzed_args.compile_args, nonZeroIsFatal=False)
        if rc == 0:
            rc = sulong.dragonEggGFortran(analyzed_args.emit_llvm_args)
            if rc == 0 and analyzed_args.llvm_ir_file:
                # create bitcode from textual IR
                llvm_as = sulong.findLLVMProgram('llvm-as')
                llvm_bc_file = os.path.splitext(analyzed_args.llvm_ir_file)[0] + '.bc'
                rc = mx.run([llvm_as, analyzed_args.llvm_ir_file, '-o', llvm_bc_file])
                # Fix: only embed the IR if llvm-as succeeded (previously the
                # llvm-as result was discarded, unlike the guarded flow in cc)
                if rc == 0:
                    rc = _embed_ir(llvm_bc_file)
    else:
        compiler = 'gfortran'
        rc = mx.run([compiler] + args, nonZeroIsFatal=False)
    return rc
def coverage_upload(args):
    """Upload a JaCoCo coverage report, plus source and binary archives, to a
    remote host over ssh, and regenerate the remote index/navigation pages."""
    parser = ArgumentParser(prog='mx coverage-upload')
    parser.add_argument('--upload-url', required=False, default=mx.get_env('COVERAGE_UPLOAD_URL'), help='Format is like rsync: user@host:/directory')
    parser.add_argument('--build-name', required=False, default=mx.get_env('BUILD_NAME'))
    parser.add_argument('--build-url', required=False, default=mx.get_env('BUILD_URL'))
    parser.add_argument('--build-number', required=False, default=mx.get_env('BUILD_NUMBER'))
    args, other_args = parser.parse_known_args(args)
    if not args.upload_url:
        parser.print_help()
        return
    remote_host, remote_basedir = args.upload_url.split(':')
    if not remote_host:
        mx.abort('Cannot determine remote host from {}'.format(args.upload_url))

    primary = mx.primary_suite()
    info = primary.vc.parent_info(primary.dir)
    rev = primary.vc.parent(primary.dir)
    if len(remote_basedir) > 0 and not remote_basedir.endswith('/'):
        remote_basedir += '/'
    # Remote directory name: <suite>_<author-date>_<short-rev>[_<build-name>][_<build-number>]
    remote_dir = '{}_{}_{}'.format(primary.name, datetime.datetime.fromtimestamp(info['author-ts']).strftime('%Y-%m-%d_%H_%M'), rev[:7])
    if args.build_name:
        remote_dir += '_' + args.build_name
    if args.build_number:
        remote_dir += '_' + args.build_number
    upload_dir = remote_basedir + remote_dir
    includes, excludes = _jacocoreport(['--omit-excluded'] + other_args)

    # Upload jar+sources
    coverage_sources = 'java_sources.tar.gz'
    coverage_binaries = 'java_binaries.tar.gz'

    with mx.Archiver(os.path.realpath(coverage_sources), kind='tgz') as sources, mx.Archiver(os.path.realpath(coverage_binaries), kind='tgz') as binaries:
        # Collect sources and class files of every non-test Java project
        def _visit_deps(dep, edge):
            if dep.isJavaProject() and not dep.is_test_project():
                binaries.zf.add(dep.output_dir(), dep.name)
                for d in dep.source_dirs():
                    sources.zf.add(d, dep.name)
                if os.path.exists(dep.source_gen_dir()):
                    sources.zf.add(dep.source_gen_dir(), dep.name)
        mx.walk_deps(mx.projects(), visit=_visit_deps)

    files = [JACOCO_EXEC, 'coverage', coverage_sources, coverage_binaries]
    print("Syncing {} to {}:{}".format(" ".join(files), remote_host, upload_dir))
    # Stream a tarball over ssh and unpack it on the remote side in one pipeline
    mx.run([
        'bash',
        '-c',
        r'tar -czf - {files} | ssh {remote} bash -c \'"mkdir -p {remotedir} && cd {remotedir} && cat | tar -x{verbose}z && chmod -R 755 ."\''
            .format(
                files=" ".join(files),
                remote=remote_host,
                remotedir=upload_dir,
                verbose='v' if mx._opts.verbose else '')
    ])

    # Write *content* to *path* on the remote host via ssh
    def upload_string(content, path):
        mx.run(['ssh', remote_host, 'bash', '-c', 'cat > "' + path + '"'], stdin=content)

    # Metadata describing this upload, consumed by the navigation page below
    upload_string(json.dumps({
        'timestamp': time.time(),
        'suite': primary.name,
        'revision': rev,
        'directory': remote_dir,
        'build_name': args.build_name,
        'build_url': args.build_url,
        'jdk_version': str(mx.get_jdk().version),
        'build_number': args.build_number,
        'primary_info': info,
        'excludes': [str(e) for e in excludes],
        'includes': [str(i) for i in includes]}), upload_dir + '/description.json')

    # Rebuild the remote index.json by concatenating all description.json files
    mx.run(['ssh', remote_host, 'bash', '-c', r'"(echo \[; for i in {remote_basedir}/*/description.json; do if \[ -s \$i \];then cat \$i; echo ,; fi done; echo null\]) > {remote_basedir}/index.json"'.format(remote_basedir=remote_basedir)])

    # Frameset entry page: navigation bar on top, report content below
    upload_string("""<html> <script language="javascript"> function urlChange(url) { if (url.pathname !== "blank") { window.history.replaceState(null, null, url.pathname.replace("/coverage_upload/", "/coverage_upload/#")) } } </script> <frameset rows="40,*"> <frame id="navigation" src="navigation.html"/> <frame id="content" src="" onload="urlChange(this.contentWindow.location);" /> </frameset> </html>""", remote_basedir + '/index.html')

    js_library_url = rewriteurl("https://ajax.googleapis.com/ajax/libs/angularjs/1.7.7/angular.js")
    # AngularJS navigation page listing all uploaded builds from index.json
    upload_string(r"""<html> <head> <script src="%js_library_url"></script> <script language="javascript"> var App = angular.module('myApp', []) .controller('IndexCtrl', function IndexCtrl($scope, $http) { var hash = parent.window.location.hash; if(hash) { hash = hash.substring(1, hash.length); // remove leading hash } $http.get('index.json').then(function(response, status) { var data = response.data.filter(x => x != null); /*
#GR-17399 Filter build that are unique per suite with revision as key and merge builds. */ data = data .filter(x => !x.hasOwnProperty('merge')) .filter( // filter builds that are unique per suite with revision as key x => !data .filter(z => x != z && x.suite == z.suite) // exclude self build and build for other suites. .map(z => z.revision) // map from array of build to array of revision .includes(x.revision) // check if revision of x is index data. ).concat(data.filter(x => x.hasOwnProperty('merge'))); // concat unique build with merged build. data.sort((l,r) => r.timestamp - l.timestamp); if(data.length > 0) { var startdir; if(hash) { startdir = data.find(build => hash.includes(build.directory)); startdir.hash = hash; } if(!startdir) { startdir = data[0]; } $scope.directory = startdir; } $scope.data = data; }); $scope.$watch('directory', (dir, olddir) => { if(dir) { var content = parent.document.getElementById("content"); var contentDocument = content.contentDocument || content.contentWindow.document; var newpath; if(olddir && olddir.suite === dir.suite) { newpath = contentDocument.location.href.replace(olddir.directory, dir.directory); } else { newpath = dir.hasOwnProperty('hash') ?
hash : dir.directory + "/coverage/"; } contentDocument.location.href = newpath; parent.window.history.replaceState(undefined, undefined, "#" + newpath.replace(/^.+coverage_upload\//, "")); } }); $scope.step = (i) => $scope.directory = $scope.data[$scope.data.indexOf($scope.directory)+i]; }); function copy(url) { var content = parent.document.getElementById("content"); var contentDocument = content.contentDocument || content.contentWindow.document; var copyText = document.getElementById("copy"); copyText.value = contentDocument.location.href.replace("coverage_upload/", "coverage_upload/#"); copyText.select(); document.execCommand("copy"); } </script> </head> <body ng-app="myApp" ng-controller="IndexCtrl"> <button ng-click="step(1)" ng-disabled="data.indexOf(directory) >= data.length-1"><<</button> <button ng-click="step(-1)" ng-disabled="data.indexOf(directory) <= 0">>></button> <select ng-model="directory" ng-options="(i.primary_info['author-ts']*1000|date:'yy-MM-dd hh:mm') + ' ' + i.build_name + ' ' + i.revision.substr(0,8) group by i.suite for i in data"></select> <a href="{{directory.build_url}}" ng-if="directory.build_url" target="_blank">Build</a> Commit: {{directory.revision.substr(0,5)}}: {{directory.primary_info.description}} <input type="text" style="opacity: 0;width: 20;" id="copy" /> <button style="float: right;" onclick="copy(window.location);">Share url</button> </body> </html>""".replace("%js_library_url", js_library_url), remote_basedir + '/navigation.html')
def upload_string(content, path):
    """Stream *content* to the file *path* on ``remote_host`` over ssh.

    The remote shell runs ``cat`` redirected into *path*, so *content* is
    written verbatim without needing a temporary local file.
    """
    remote_cmd = 'cat > "' + path + '"'
    mx.run(['ssh', remote_host, 'bash', '-c', remote_cmd], stdin=content)
def opt(args=None, version=None, out=None, err=None):
    """Run the LLVM ``opt`` tool.

    :param args: extra command-line arguments for ``opt`` (defaults to none)
    :param version: LLVM version forwarded to ``findLLVMProgram``
    :param out: forwarded to ``mx.run``
    :param err: forwarded to ``mx.run``
    :return: the exit status reported by ``mx.run``
    """
    # Fix: the original concatenated `+ args` directly, raising a TypeError
    # whenever the function was invoked with the default args=None.  Use the
    # same None-guard idiom as cinterfacetutorial() in this file.
    args = [] if args is None else args
    return mx.run([findLLVMProgram('opt', version)] + args, out=out, err=err)
def gate_body(args, tasks):
    """Run the VM suite gate: GraalVM sanity checks, per-language task stubs,
    and (when the LibGraal component is present) the libgraal compiler and
    Truffle test batteries.

    :param args: parsed gate arguments; reads ``strict_mode`` and
                 ``extra_vm_argument``
    :param tasks: the mx gate task list that each ``Task`` registers into
    """
    with Task('Vm: Basic GraalVM Tests', tasks, tags=[VmGateTasks.compiler]) as t:
        if t and mx_vm.has_component('Graal compiler'):
            # 1. a full open-source build should be built with an open-source JDK
            #    but we allow Oracle JDK in non-strict mode as it is common on
            #    developer machines
            # 2. the build must be a GraalVM
            # 3. the build must be JVMCI-enabled since the 'Graal compiler'
            #    component is registered
            version_regex = mx_vm.openjdk_version_regex if args.strict_mode else mx_vm.anyjdk_version_regex
            mx_vm.check_versions(mx_vm.graalvm_output(), version_regex, graalvm_version_regex=mx_vm.graalvm_version_regex, expect_graalvm=True, check_jvmci=True)

    # The following language tasks are placeholders: each only asserts that the
    # component is installed (fatalIfMissing=True aborts otherwise).
    with Task('Vm: Sulong tests', tasks, tags=[VmGateTasks.sulong]) as t:
        if t and mx_vm.has_component('Sulong', fatalIfMissing=True):
            pass

    with Task('Vm: Graal.js tests', tasks, tags=[VmGateTasks.graal_js_all]) as t:
        if t and mx_vm.has_component('Graal.js', fatalIfMissing=True):
            pass

    with Task('Vm: Graal.nodejs tests', tasks, tags=[VmGateTasks.graal_nodejs]) as t:
        if t and mx_vm.has_component('Graal.nodejs', fatalIfMissing=True):
            pass

    with Task('Vm: TruffleRuby tests', tasks, tags=[VmGateTasks.truffleruby]) as t:
        if t and mx_vm.has_component('TruffleRuby', fatalIfMissing=True):
            pass

    with Task('Vm: FastR tests', tasks, tags=[VmGateTasks.fastr]) as t:
        if t and mx_vm.has_component('FastR', fatalIfMissing=True):
            pass

    with Task('Vm: Graal.Python tests', tasks, tags=[VmGateTasks.graalpython]) as t:
        if t and mx_vm.has_component('Graal.Python', fatalIfMissing=True):
            pass

    if mx_vm.has_component('LibGraal'):
        libgraal_location = mx_vm.get_native_image_locations('LibGraal', 'jvmcicompiler')
        if libgraal_location is None:
            mx.warn("Skipping libgraal tests: no library enabled in the LibGraal component")
        else:
            # VM flags that force compilation through the native libgraal
            # shared library instead of jar-based Graal.
            extra_vm_arguments = ['-XX:+UseJVMCICompiler', '-XX:+UseJVMCINativeLibrary', '-XX:JVMCILibPath=' + dirname(libgraal_location)]
            if args.extra_vm_argument:
                extra_vm_arguments += args.extra_vm_argument
            # Deferred import: mx_compiler is only needed (and importable) when
            # the LibGraal component is part of this GraalVM build.
            import mx_compiler

            # run avrora on the GraalVM binary itself
            with Task('LibGraal Compiler:GraalVM DaCapo-avrora', tasks, tags=[VmGateTasks.libgraal]) as t:
                if t:
                    mx.run([join(mx_vm.graalvm_home(), 'bin', 'java'), '-XX:+UseJVMCICompiler', '-XX:+UseJVMCINativeLibrary', '-jar', mx.library('DACAPO').get_path(True), 'avrora'])

            with Task('LibGraal Compiler:CTW', tasks, tags=[VmGateTasks.libgraal]) as t:
                if t:
                    mx_compiler.ctw([
                        '-DCompileTheWorld.Config=Inline=false CompilationFailureAction=ExitVM',
                        '-esa',
                        '-XX:+EnableJVMCI',
                        '-DCompileTheWorld.MultiThreaded=true',
                        '-Dgraal.InlineDuringParsing=false',
                        '-Dgraal.TrackNodeSourcePosition=true',
                        '-DCompileTheWorld.Verbose=false',
                        '-XX:ReservedCodeCacheSize=300m',
                    ], extra_vm_arguments)

            mx_compiler.compiler_gate_benchmark_runner(tasks, extra_vm_arguments, prefix='LibGraal Compiler:')

            with Task('LibGraal Truffle:unittest', tasks, tags=[VmGateTasks.libgraal]) as t:
                if t:
                    def _unittest_config_participant(config):
                        # Strip the flag that forces the default (non-compiling)
                        # Truffle runtime so libgraal compilation is exercised.
                        vmArgs, mainClass, mainClassArgs = config
                        newVmArgs = [arg for arg in vmArgs if arg != "-Dtruffle.TruffleRuntime=com.oracle.truffle.api.impl.DefaultTruffleRuntime"]
                        return (newVmArgs, mainClass, mainClassArgs)
                    mx_unittest.add_config_participant(_unittest_config_participant)
                    # Optional env-driven exclusion list, written to a temp
                    # blacklist file consumed by the unittest harness.
                    excluded_tests = environ.get("TEST_LIBGRAAL_EXCLUDE")
                    if excluded_tests:
                        with NamedTemporaryFile(prefix='blacklist.', mode='w', delete=False) as fp:
                            fp.file.writelines([l + '\n' for l in excluded_tests.split()])
                            unittest_args = ["--blacklist", fp.name]
                    else:
                        unittest_args = []
                    unittest_args = unittest_args + ["--enable-timing", "--verbose"]
                    mx_unittest.unittest(unittest_args + extra_vm_arguments + [
                        "-Dgraal.TruffleCompileImmediately=true",
                        "-Dgraal.TruffleBackgroundCompilation=false",
                        "truffle"])
    else:
        mx.warn("Skipping libgraal tests: component not enabled")

    gate_substratevm(tasks)
    gate_sulong(tasks)
    gate_ruby(tasks)
    gate_python(tasks)
def build(self):
    """Compile every program source of this project to WebAssembly (plus
    helper artifacts) into the project's output directory.

    Per source file the steps are: (1) build the .wasm binary with emcc /
    wat2wasm (or copy a prebuilt .wasm), (2)-(3) copy sibling .result and
    .opts files, (4) emit a .wat disassembly for debugging, (5) for C
    benchmark sources also build a native binary with gcc, and finally write
    a ``wasm_test_index`` listing per output subdirectory.

    Requires the EMCC_DIR, GCC (via gcc_dir) and WABT_DIR toolchains to be
    configured; aborts otherwise.
    """
    source_dir = self.subject.getSourceDir()
    output_dir = self.subject.getOutputDir()
    if not emcc_dir:
        mx.abort("No EMCC_DIR specified - the source programs will not be compiled to .wasm.")
    emcc_cmd = os.path.join(emcc_dir, "emcc")
    gcc_cmd = os.path.join(gcc_dir, "gcc")
    # Sanity-check the toolchains up front by invoking their version flags.
    if mx.run([emcc_cmd, "-v"], nonZeroIsFatal=False) != 0:
        mx.abort("Could not check the emcc version.")
    if mx.run([gcc_cmd, "--version"], nonZeroIsFatal=False) != 0:
        mx.abort("Could not check the gcc version.")
    if not wabt_dir:
        mx.abort("Set WABT_DIR if you want the binary to include .wat files.")
    mx.log("Building files from the source dir: " + source_dir)
    cc_flags = ["-g2", "-O3"]
    include_flags = []
    if hasattr(self.project, "includeset"):
        include_flags = ["-I", os.path.join(_suite.dir, "includes", self.project.includeset)]
    emcc_flags = ["-s", "EXIT_RUNTIME=1", "-s", "STANDALONE_WASM", "-s", "WASM_BIGINT"] + cc_flags
    if self.project.isBenchmarkProject():
        # Export the benchmark entry points; emscripten expects a JSON-style
        # list, hence the single- to double-quote replacement.
        emcc_flags = emcc_flags + ["-s", "EXPORTED_FUNCTIONS=" + str(benchmark_methods).replace("'", "\"") + ""]
    subdir_program_names = defaultdict(lambda: [])
    for root, filename in self.subject.getProgramSources():
        subdir = os.path.relpath(root, self.subject.getSourceDir())
        mx.ensure_dir_exists(os.path.join(output_dir, subdir))

        basename = remove_extension(filename)
        source_path = os.path.join(root, filename)
        output_wasm_path = os.path.join(output_dir, subdir, basename + ".wasm")
        output_js_path = os.path.join(output_dir, subdir, basename + ".js")
        timestampedSource = mx.TimeStampFile(source_path)
        timestampedOutput = mx.TimeStampFile(output_wasm_path)
        # Incremental build: only rebuild when the source is newer than the
        # .wasm output (or the output does not exist yet).
        mustRebuild = timestampedSource.isNewerThan(timestampedOutput) or not timestampedOutput.exists()

        # Step 1: build the .wasm binary.
        if mustRebuild:
            if filename.endswith(".c"):
                # This generates both a js file and a wasm file.
                # See https://github.com/emscripten-core/emscripten/wiki/WebAssembly-Standalone
                build_cmd_line = [emcc_cmd] + emcc_flags + [source_path, "-o", output_js_path] + include_flags
                if mx.run(build_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not build the wasm-only output of " + filename + " with emcc.")
            elif filename.endswith(".wat"):
                # Step 1: compile the .wat file to .wasm.
                wat2wasm_cmd = os.path.join(wabt_dir, "wat2wasm")
                build_cmd_line = [wat2wasm_cmd, "-o", output_wasm_path, source_path]
                if mx.run(build_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not translate " + filename + " to binary format.")
            elif filename.endswith(".wasm"):
                # Prebuilt binary: just copy it into the output tree.
                shutil.copyfile(source_path, output_wasm_path)
        else:
            mx.logv("skipping, file is up-to-date: " + source_path)

        # Step 2: copy the result file if it exists.
        result_path = os.path.join(root, basename + ".result")
        if os.path.isfile(result_path):
            result_output_path = os.path.join(output_dir, subdir, basename + ".result")
            shutil.copyfile(result_path, result_output_path)

        # Step 3: copy the opts file if it exists.
        opts_path = os.path.join(root, basename + ".opts")
        if os.path.isfile(opts_path):
            opts_output_path = os.path.join(output_dir, subdir, basename + ".opts")
            shutil.copyfile(opts_path, opts_output_path)

        output_wat_path = os.path.join(output_dir, subdir, basename + ".wat")
        if mustRebuild:
            if filename.endswith(".c"):
                # Step 4: produce the .wat files, for easier debugging.
                wasm2wat_cmd = os.path.join(wabt_dir, "wasm2wat")
                if mx.run([wasm2wat_cmd, "-o", output_wat_path, output_wasm_path], nonZeroIsFatal=False) != 0:
                    mx.abort("Could not compile .wat file for " + filename)
            elif filename.endswith(".wat"):
                # Step 4: copy the .wat file, for easier debugging.
                wat_path = os.path.join(root, basename + ".wat")
                shutil.copyfile(wat_path, output_wat_path)

        # Step 5: if this is a benchmark project, create native binaries too.
        if mustRebuild:
            if filename.endswith(".c"):
                mx.ensure_dir_exists(os.path.join(output_dir, subdir, NATIVE_BENCH_DIR))
                output_path = os.path.join(output_dir, subdir, NATIVE_BENCH_DIR, mx.exe_suffix(basename))
                link_flags = ["-lm"]
                gcc_cmd_line = [gcc_cmd] + cc_flags + [source_path, "-o", output_path] + include_flags + link_flags
                if mx.run(gcc_cmd_line, nonZeroIsFatal=False) != 0:
                    mx.abort("Could not build the native binary of " + filename + ".")
                os.chmod(output_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
            elif filename.endswith(".wat"):
                mx.warn("The .wat files are not translated to native binaries: " + filename)

        # Remember the source name.
        subdir_program_names[subdir].append(basename)

    # Write one index file per output subdirectory listing all program names.
    for subdir in subdir_program_names:
        with open(os.path.join(output_dir, subdir, "wasm_test_index"), "w") as f:
            for name in subdir_program_names[subdir]:
                f.write(name)
                f.write("\n")
def graalpython_gate_runner(args, tasks):
    """Register and run the GraalPython gate tasks: JUnit, Python unittests
    (JVM and SVM, plain and sandboxed), Jython emulation, tagged CPython
    tests, license checks, shared-library builds and full GraalVM image
    smoke tests.

    :param args: gate arguments (unused directly here)
    :param tasks: the mx gate task list that each ``Task`` registers into
    """
    # JUnit tests
    with Task('GraalPython JUnit', tasks, tags=[GraalPythonTags.junit]) as task:
        if task:
            punittest(['--verbose'])

    # Unittests on JVM
    with Task('GraalPython Python unittests', tasks, tags=[GraalPythonTags.unittest]) as task:
        if task:
            if platform.system() != 'Darwin':
                # TODO: drop condition when python3 is available on darwin
                mx.log("Running tests with CPython")
                test_args = [_graalpytest_driver(), "-v", _graalpytest_root()]
                mx.run(["python3"] + test_args, nonZeroIsFatal=True)
            mx.run(["env"])
            run_python_unittests(python_gvm())

    with Task('GraalPython sandboxed tests', tasks, tags=[GraalPythonTags.unittest_sandboxed]) as task:
        if task:
            run_python_unittests(python_gvm(["sandboxed"]), args=["--llvm.managed"])

    with Task('GraalPython Jython emulation tests', tasks, tags=[GraalPythonTags.unittest_jython]) as task:
        if task:
            run_python_unittests(python_gvm(), args=["--python.EmulateJython"], paths=["test_interop.py"])

    with Task('GraalPython Python tests', tasks, tags=[GraalPythonTags.tagged]) as task:
        if task:
            # Env vars toggle the tagged CPython unittest subset and threaded
            # test execution inside the test driver.
            with set_env(ENABLE_CPYTHON_TAGGED_UNITTESTS="true", ENABLE_THREADED_GRAALPYTEST="true"):
                run_python_unittests(python_gvm(), args=["--python.WithThread=true"], paths=["test_tagged_unittests.py"])

    # Unittests on SVM
    with Task('GraalPython tests on SVM', tasks, tags=[GraalPythonTags.svmunit]) as task:
        if task:
            run_python_unittests(python_svm())

    with Task('GraalPython sandboxed tests on SVM', tasks, tags=[GraalPythonTags.svmunit_sandboxed]) as task:
        if task:
            run_python_unittests(python_svm(["sandboxed"]), args=["--llvm.managed"])

    with Task('GraalPython license header update', tasks, tags=[GraalPythonTags.license]) as task:
        if task:
            python_checkcopyrights([])

    with Task('GraalPython GraalVM shared-library build', tasks, tags=[GraalPythonTags.shared_object, GraalPythonTags.graalvm]) as task:
        if task:
            run_shared_lib_test()

    with Task('GraalPython GraalVM sandboxed shared-library build', tasks, tags=[GraalPythonTags.shared_object_sandboxed, GraalPythonTags.graalvm_sandboxed]) as task:
        if task:
            run_shared_lib_test(["sandboxed"])

    with Task('GraalPython GraalVM build', tasks, tags=[GraalPythonTags.svm, GraalPythonTags.graalvm]) as task:
        if task:
            svm_image = python_svm(["--version"])
            benchmark = os.path.join(PATH_MESO, "image-magix.py")
            out = mx.OutputCapture()
            mx.run([svm_image, "-v", "-S", "--log.python.level=FINEST", benchmark], nonZeroIsFatal=True, out=mx.TeeOutputCapture(out))
            # Expected benchmark output; abort if the image produced anything else.
            success = "\n".join([
                "[0, 0, 0, 0, 0, 0, 10, 10, 10, 0, 0, 10, 3, 10, 0, 0, 10, 10, 10, 0, 0, 0, 0, 0, 0]",
            ])
            if success not in out.data:
                mx.abort('Output from generated SVM image "' + svm_image + '" did not match success pattern:\n' + success)
            # Test that stdlib paths are not cached on packages
            out = mx.OutputCapture()
            mx.run([svm_image, "-v", "-S", "--log.python.level=FINEST", "--python.StdLibHome=/foobar", "-c", "import encodings; print(encodings.__path__)"], out=mx.TeeOutputCapture(out))
            if "/foobar" not in out.data:
                mx.abort('Output from generated SVM image "' + svm_image + '" did not have patched std lib path "/foobar"')
            # Test that stdlib paths are not cached on modules
            out = mx.OutputCapture()
            mx.run([svm_image, "-v", "-S", "--log.python.level=FINEST", "--python.StdLibHome=/foobar", "-c", "import encodings; print(encodings.__file__)"], out=mx.TeeOutputCapture(out))
            if "/foobar" not in out.data:
                mx.abort('Output from generated SVM image "' + svm_image + '" did not have patched std lib path "/foobar"')
            # Finally, test that we can start even if the graalvm was moved
            out = mx.OutputCapture()
            graalvm_home = svm_image.replace(os.path.sep.join(["", "bin", "graalpython"]), "")
            new_graalvm_home = graalvm_home + "_new"
            shutil.move(graalvm_home, new_graalvm_home)
            launcher = os.path.join(new_graalvm_home, "bin", "graalpython")
            mx.log(launcher)
            mx.run([launcher, "--log.python.level=FINE", "-S", "-c", "print(b'abc'.decode('ascii'))"], out=mx.TeeOutputCapture(out), err=mx.TeeOutputCapture(out))
            assert "Using preinitialized context." in out.data

    with Task('GraalPython GraalVM native embedding', tasks, tags=[GraalPythonTags.svm, GraalPythonTags.graalvm, GraalPythonTags.native_image_embedder]) as task:
        if task:
            run_embedded_native_python_test()
def _native_image(args, **kwargs):
    """Invoke the native-image launcher with *args*, forwarding any extra
    keyword arguments (out/err/cwd/...) to ``mx.run``."""
    command = [native_image_cmd]
    command.extend(args)
    mx.run(command, **kwargs)
def gnu_r(args):
    """Run the internally built GNU R executable with the given arguments.

    A non-zero exit code is returned rather than aborting mx.
    """
    r_executable = join(_gnur_path(), 'bin', 'R')
    return mx.run([r_executable] + args, nonZeroIsFatal=False)
def _update_JVMCI_library():
    """
    Updates the "path" and "sha1" attributes of the "JVMCI" library to
    refer to a jvmci.jar created from the JVMCI classes in JDK9.

    Two layouts are supported: an exploded ``modules/jdk.vm.ci`` directory
    (jarred up directly) or a ``bootmodules.jimage`` file (extracted via a
    generated Java helper using the internal jimage reader).
    """
    suiteDict = _suite.suiteDict
    jvmciLib = suiteDict['libraries']['JVMCI']
    d = join(_suite.get_output_root(), abspath(_jdk.home)[1:])
    path = join(d, 'jvmci.jar')

    explodedModule = join(_jdk.home, 'modules', 'jdk.vm.ci')
    if exists(explodedModule):
        # Collect every file of the exploded module, tracking the newest
        # input so the jar is only rebuilt when something changed.
        jarInputs = {}
        newestJarInput = None
        for root, _, files in os.walk(explodedModule):
            relpath = root[len(explodedModule) + 1:]
            for f in files:
                arcname = join(relpath, f).replace(os.sep, '/')
                jarInput = join(root, f)
                jarInputs[arcname] = jarInput
                t = mx.TimeStampFile(jarInput)
                if newestJarInput is None or t.isNewerThan(newestJarInput):
                    newestJarInput = t
        if not exists(path) or newestJarInput.isNewerThan(path):
            with mx.Archiver(path, kind='zip') as arc:
                # Fix: iteritems() is Python-2-only; items() works on both
                # Python 2 and 3.
                for arcname, jarInput in jarInputs.items():
                    with open(jarInput, 'rb') as fp:
                        contents = fp.read()
                        arc.zf.writestr(arcname, contents)
    else:
        # Use the jdk.internal.jimage utility since it's the only way
        # to partially read .jimage files as the JDK9 jimage tool
        # does not support partial extraction.
        bootmodules = join(_jdk.home, 'lib', 'modules', 'bootmodules.jimage')
        if not exists(bootmodules):
            mx.abort('Could not find JVMCI classes at ' + bootmodules + ' or ' + explodedModule)
        if not exists(path) or mx.TimeStampFile(bootmodules).isNewerThan(path):
            mx.ensure_dir_exists(d)
            javaSource = join(d, 'ExtractJVMCI.java')
            with open(javaSource, 'w') as fp:
                # Fix: the original used the Python-2-only `print >> fp, ...`
                # statement, which is a SyntaxError on Python 3.
                fp.write("""import java.io.FileOutputStream;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import jdk.internal.jimage.BasicImageReader;

public class ExtractJVMCI {
    public static void main(String[] args) throws Exception {
        BasicImageReader image = BasicImageReader.open(args[0]);
        String[] names = image.getEntryNames();
        if (names.length == 0) {
            return;
        }
        try (JarOutputStream jos = new JarOutputStream(new FileOutputStream(args[1]))) {
            for (String name : names) {
                if (name.startsWith("/jdk.vm.ci/")) {
                    String ename = name.substring("/jdk.vm.ci/".length());
                    JarEntry je = new JarEntry(ename);
                    jos.putNextEntry(je);
                    jos.write(image.getResource(name));
                    jos.closeEntry();
                }
            }
        }
    }
}
""")
            mx.run([_jdk.javac, '-d', d, javaSource])
            mx.run([_jdk.java, '-cp', d, 'ExtractJVMCI', bootmodules, path])
            if not exists(path):
                mx.abort('Could not find the JVMCI classes in ' + bootmodules)

    jvmciLib['path'] = path
    jvmciLib['sha1'] = mx.sha1OfFile(path)
def gnu_rtests(args, env=None):
    ''' run tests of the internally built GNU R under tests subdirectory '''
    # Every .R test is run once under FastR and once under GNU R via
    # `R CMD BATCH`; the two *.Rout files are then compared with
    # `R CMD Rdiff` and all diffs are appended to log/all.diff.
    os.chdir(_fastr_suite.dir)  # Packages install fails otherwise
    # mx_fastr_pkgs.installpkgs(['--pkg-pattern', '^MASS$'])  # required by tests/Examples/base-Ex.R
    np = mx.project('com.oracle.truffle.r.native')
    tst = join(np.dir, 'gnur', 'tests')
    tstsrc = join(tst, 'src')
    tstlog = join(tst, 'log')
    # Start from a clean log directory every run.
    shutil.rmtree(tstlog, True)
    os.mkdir(tstlog)
    diffname = join(tstlog, 'all.diff')
    diff = open(diffname, 'a')
    try:
        # '' means the top-level src dir itself; 'Examples' is a subdir.
        for subd in ['Examples', '']:
            logd = join(tstlog, subd)
            if subd != '':
                os.mkdir(logd)
            # R CMD BATCH writes its .Rout into the current directory, so
            # chdir into the per-subdir log directory first.
            os.chdir(logd)
            srcd = join(tstsrc, subd)
            for f in sorted(os.listdir(srcd)):
                if f.endswith('.R'):
                    print('Running {} explicitly by FastR CMD BATCH ...'.format(f))  # pylint: disable=superfluous-parens
                    mx.run([r_path(), '--vanilla', 'CMD', 'BATCH', join(srcd, f)] + args, nonZeroIsFatal=False, env=env, timeout=90)
                    outf = f + 'out'  # R CMD BATCH output: <name>.Rout
                    if os.path.isfile(outf):
                        outff = outf + '.fastr'
                        os.rename(outf, outff)
                    print('Running {} explicitly by GnuR CMD BATCH ...'.format(f))  # pylint: disable=superfluous-parens
                    mx.run([join(_gnur_path(), 'bin', 'R'), '--vanilla', 'CMD', 'BATCH', join(srcd, f)] + args, nonZeroIsFatal=False, env=env, timeout=90)
                    if os.path.isfile(outf):
                        outfg = outf + '.gnur'
                        os.rename(outf, outfg)
                        diff.write('\nRdiff {} {}:\n'.format(outfg, outff))
                        diff.flush()
                        # NOTE(review): Popen is not waited on before the next
                        # write/flush — diff output ordering may race; confirm
                        # whether this is intentional best-effort logging.
                        subprocess.Popen([r_path(), 'CMD', 'Rdiff', outfg, outff], stdout=diff, stderr=diff, shell=False)
                        diff.flush()
        diff.close()
        print('FastR to GnuR diff was written to {}'.format(diffname))  # pylint: disable=superfluous-parens
    finally:
        # Best-effort cleanup of the deparse artifacts created by the runs.
        shutil.rmtree(join(_fastr_suite.dir, 'deparse'), True)
def dragonEggGPP(args=None):
    """executes G++ with dragonegg

    :param args: extra command-line arguments for G++ (defaults to none)
    :return: the exit status reported by ``mx.run``
    """
    # Fix: the original concatenated `+ args` directly, raising a TypeError
    # whenever the function was invoked with the default args=None.  Use the
    # same None-guard idiom as cinterfacetutorial() in this file.
    args = [] if args is None else args
    executeCommand = [getGPP(), "-fplugin=" + dragonEggPath(), '-fplugin-arg-dragonegg-emit-ir']
    return mx.run(executeCommand + args)
def run_java(self, args, out=None, err=None, cwd=None, nonZeroIsFatal=False):
    """Run 'java' workloads on the GraalVM's java launcher, after recording
    VM info from the arguments."""
    self.extract_vm_info(args)
    java_launcher = os.path.join(mx_sdk_vm_impl.graalvm_home(fatalIfMissing=True), 'bin', 'java')
    return mx.run([java_launcher] + args, out=out, err=err, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal)
def run_output(args, cwd=None):
    """Run *args* and return the combined stdout/stderr as a string."""
    capture = mx.OutputCapture()
    mx.run(args, cwd=cwd, out=capture, err=capture)
    return capture.data
def run_java(self, args, out=None, err=None, cwd=None, nonZeroIsFatal=False):
    """Run a benchmark as a native image instead of on HotSpot.

    Pipeline: (1) optionally run once on HotSpot with the native-image agent
    and/or -Dgraal.PGOInstrument to collect reflection configs and profiles,
    (2) build and run PGO-instrumented images for the configured number of
    iterations, (3) build the final (optionally profile-guided) image, and
    (4) execute it with the benchmark arguments.

    A plain '-version' query is delegated unchanged to the HotSpot runner.
    """
    if '-version' in args:
        return super(NativeImageVM, self).run_java(args, out=out, err=err, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal)
    else:
        image_cwd = os.path.abspath(cwd if cwd else os.getcwd())
        # Gate runs always treat failures as fatal.
        non_zero_is_fatal = self.is_gate or nonZeroIsFatal
        config = NativeImageVM.BenchmarkConfig()
        original_java_run_args = config.parse(args)
        executable, classpath_arguments, system_properties, image_run_args = NativeImageVM.extract_benchmark_arguments(original_java_run_args)
        # Image name: the jar's basename for '-jar foo.jar', else the main class.
        executable_name = (os.path.splitext(os.path.basename(executable[1]))[0] if executable[0] == '-jar' else executable[0]).lower()
        image_path = os.path.join(image_cwd, executable_name)
        config.profile_dir = mx.mkdtemp(suffix='profile', prefix='native-image')
        profile_path = os.path.join(config.profile_dir, executable_name + '.iprof')

        # Agent configuration and/or HotSpot profiling
        needs_config = (config.config_dir is None) and config.needs_config
        if needs_config or self.hotspot_pgo:
            hotspot_vm_args = ['-ea', '-esa'] if self.is_gate else []
            hotspot_run_args = []

            if needs_config:
                config.config_dir = mx.mkdtemp(suffix='config', prefix='native-image')
                hotspot_vm_args += ['-agentlib:native-image-agent=config-output-dir=' + str(config.config_dir)]
            if self.hotspot_pgo:
                hotspot_vm_args += ['-Dgraal.PGOInstrument=' + profile_path]

            if config.extra_agent_run_args:
                # Profile runs (non-gate PGO) use the profile-run args,
                # otherwise the agent-run args.
                hotspot_run_args += config.extra_profile_run_args if self.hotspot_pgo and not self.is_gate else config.extra_agent_run_args
            else:
                hotspot_run_args += image_run_args

            hotspot_args = hotspot_vm_args + classpath_arguments + executable + system_properties + hotspot_run_args
            mx.log('Running with HotSpot to get the configuration files and profiles. This could take a while:')
            super(NativeImageVM, self).run_java(hotspot_args, out=None, err=None, cwd=image_cwd, nonZeroIsFatal=non_zero_is_fatal)

        # Common native-image build arguments shared by instrumented and
        # final image builds.
        base_image_build_args = [os.path.join(mx_vm.graalvm_home(fatalIfMissing=True), 'bin', 'native-image')]
        base_image_build_args += ['--no-fallback']
        base_image_build_args += ['-J-ea', '-J-esa', '-H:+VerifyGraalGraphs', '-H:+VerifyPhases', '-H:+TraceClassInitialization'] if self.is_gate else []
        base_image_build_args += system_properties
        base_image_build_args += classpath_arguments
        base_image_build_args += executable
        base_image_build_args += ['-H:Name=' + executable_name, '-H:Path=' + image_cwd]
        if needs_config:
            base_image_build_args += ['-H:ConfigurationFileDirectories=' + config.config_dir]
        base_image_build_args += config.extra_image_build_arguments

        # PGO instrumentation
        i = 0
        while i < self.pgo_instrumented_iterations:
            # First iteration has no prior profile unless HotSpot produced one.
            instrument_args = ['--pgo-instrument'] + ([] if i == 0 and not self.hotspot_pgo else ['--pgo'])
            instrument_image_build_args = base_image_build_args + instrument_args
            mx.log('Building the instrumentation image with: ')
            mx.log(' ' + ' '.join([pipes.quote(str(arg)) for arg in instrument_image_build_args]))
            mx.run(instrument_image_build_args, out=None, err=None, cwd=image_cwd, nonZeroIsFatal=non_zero_is_fatal)

            image_run_cmd = [image_path]
            image_run_cmd += ['-XX:ProfilesDumpFile=' + profile_path]
            if config.extra_profile_run_args:
                image_run_cmd += config.extra_profile_run_args
            else:
                image_run_cmd += image_run_args + config.extra_run_args
            mx.log('Running the instrumented image with: ')
            mx.log(' ' + ' '.join([pipes.quote(str(arg)) for arg in image_run_cmd]))
            mx.run(image_run_cmd, out=out, err=err, cwd=image_cwd, nonZeroIsFatal=non_zero_is_fatal)
            i += 1

        # Build the final image
        pgo_args = ['--pgo=' + profile_path] if self.pgo_instrumented_iterations > 0 or self.hotspot_pgo else []
        final_image_args = base_image_build_args + pgo_args
        mx.log('Building the final image with: ')
        mx.log(' ' + ' '.join([pipes.quote(str(arg)) for arg in final_image_args]))
        mx.run(final_image_args, out=None, err=None, cwd=image_cwd, nonZeroIsFatal=non_zero_is_fatal)

        # Execute the benchmark
        image_run_cmd = [image_path] + image_run_args + config.extra_run_args
        mx.log('Running the produced native executable with: ')
        mx.log(' ' + ' '.join([pipes.quote(str(arg)) for arg in image_run_cmd]))
        mx.run(image_run_cmd, out=out, err=err, cwd=image_cwd, nonZeroIsFatal=non_zero_is_fatal)
def updategraalinopenjdk(args):
    """updates the Graal sources in OpenJDK

    Copies the Graal/SDK sources of this repository into the module source
    trees of a local OpenJDK repo (renaming packages, swapping license
    headers and preserving per-file line counts), updates the relevant
    makefile sections, stages the changes in the repo's VCS, and writes a
    change log since the last JDK tag.
    """
    parser = ArgumentParser(prog='mx updategraalinopenjdk')
    parser.add_argument('--pretty', help='value for --pretty when logging the changes since the last JDK* tag')
    parser.add_argument('jdkrepo', help='path to the local OpenJDK repo')
    parser.add_argument('version', type=int, help='Java version of the OpenJDK repo')
    args = parser.parse_args(args)

    if mx_compiler.jdk.javaCompliance.value < args.version:
        mx.abort('JAVA_HOME/--java-home must be Java version {} or greater: {}'.format(args.version, mx_compiler.jdk))

    graal_modules = [
        # JDK module jdk.internal.vm.compiler is composed of sources from:
        GraalJDKModule('jdk.internal.vm.compiler',
            # 1. Classes in the compiler suite under the org.graalvm namespace except for packages
            #    or projects whose names include "truffle", "management" or "core.llvm"
            [SuiteJDKInfo('compiler', ['org.graalvm'], ['truffle', 'management', 'core.llvm']),
             # 2. Classes in the sdk suite under the org.graalvm.collections and org.graalvm.word namespaces
             SuiteJDKInfo('sdk', ['org.graalvm.collections', 'org.graalvm.word'], [])]),
        # JDK module jdk.internal.vm.compiler.management is composed of sources from:
        GraalJDKModule('jdk.internal.vm.compiler.management',
            # 1. Classes in the compiler suite under the org.graalvm.compiler.hotspot.management namespace
            [SuiteJDKInfo('compiler', ['org.graalvm.compiler.hotspot.management'], [])]),
        # JDK module jdk.aot is composed of sources from:
        GraalJDKModule('jdk.aot',
            # 1. Classes in the compiler suite under the jdk.tools.jaotc namespace
            [SuiteJDKInfo('compiler', ['jdk.tools.jaotc'], [])]),
    ]

    # Strings to be replaced in files copied to OpenJDK.
    replacements = {
        'published by the Free Software Foundation. Oracle designates this\n * particular file as subject to the "Classpath" exception as provided\n * by Oracle in the LICENSE file that accompanied this code.': 'published by the Free Software Foundation.',
        _read_sibling_file('upl_substring.txt'): _read_sibling_file('gplv2_substring.txt')
    }

    # Strings that must not exist in OpenJDK source files. This is applied after replacements are made.
    blacklist = ['"Classpath" exception']

    jdkrepo = args.jdkrepo
    git_repo = _is_git_repo(jdkrepo)

    # Sanity check: every target module directory must already exist.
    for m in graal_modules:
        m_src_dir = join(jdkrepo, 'src', m.name)
        if not exists(m_src_dir):
            mx.abort(jdkrepo + ' does not look like a JDK repo - ' + m_src_dir + ' does not exist')

    def run_output(args, cwd=None):
        # Run a command and return its combined stdout/stderr as a string.
        out = mx.OutputCapture()
        mx.run(args, cwd=cwd, out=out, err=out)
        return out.data

    # Refuse to run on a dirty target repo.
    for m in graal_modules:
        m_src_dir = join('src', m.name)
        mx.log('Checking ' + m_src_dir)
        if git_repo:
            out = run_output(['git', 'status', '-s', m_src_dir], cwd=jdkrepo)
        else:
            out = run_output(['hg', 'status', m_src_dir], cwd=jdkrepo)
        if out:
            mx.abort(jdkrepo + ' is not "clean":' + '\n' + out[:min(200, len(out))] + '...')

    # Apply package renamings to all makefiles.
    for dirpath, _, filenames in os.walk(join(jdkrepo, 'make')):
        for filename in filenames:
            if filename.endswith('.gmk'):
                rename_packages(join(dirpath, filename), True)

    java_package_re = re.compile(r"^\s*package\s+(?P<package>[a-zA-Z_][\w\.]*)\s*;$", re.MULTILINE)

    copied_source_dirs = []
    jdk_internal_vm_compiler_EXCLUDES = set()  # pylint: disable=invalid-name
    jdk_internal_vm_compiler_test_SRC = set()  # pylint: disable=invalid-name
    # Add org.graalvm.compiler.processor since it is only a dependency
    # for (most) Graal annotation processors and is not needed to
    # run Graal.
    jdk_internal_vm_compiler_EXCLUDES.add('org.graalvm.compiler.processor')
    for m in graal_modules:
        classes_dir = join(jdkrepo, 'src', m.name, 'share', 'classes')
        for info in m.suites:
            mx.log('Processing ' + m.name + ':' + info.name)
            # Delete the previously copied project directories for this suite.
            for e in os.listdir(classes_dir):
                if any(inc in e for inc in info.includes) and not any(ex in e for ex in info.excludes):
                    project_dir = join(classes_dir, e)
                    shutil.rmtree(project_dir)
                    mx.log('  removed ' + project_dir)
            suite = mx.suite(info.name)

            # Collect (version, project, source_dir, target_dir) work items.
            worklist = []
            for p in [e for e in suite.projects if e.isJavaProject()]:
                if any(inc in p.name for inc in info.includes) and not any(ex in p.name for ex in info.excludes):
                    assert len(p.source_dirs()) == 1, p
                    version = 0
                    new_project_name = p.name
                    if hasattr(p, 'multiReleaseJarVersion'):
                        # Multi-release projects map onto their base project
                        # when their version is covered by the target JDK.
                        version = int(getattr(p, 'multiReleaseJarVersion'))
                        if version <= args.version:
                            base_project = _find_version_base_project(p)
                            new_project_name = base_project.name
                        else:
                            continue

                    for old_name, new_name in package_renamings.items():
                        if new_project_name.startswith(old_name):
                            new_project_name = new_project_name.replace(old_name, new_name)

                    source_dir = p.source_dirs()[0]
                    target_dir = join(classes_dir, new_project_name, 'src')
                    copied_source_dirs.append(source_dir)

                    workitem = (version, p, source_dir, target_dir)
                    worklist.append(workitem)

            # Ensure versioned resources are copied in the right order
            # such that higher versions override lower versions.
            worklist = sorted(worklist)

            for version, p, source_dir, target_dir in worklist:
                first_file = True
                for dirpath, _, filenames in os.walk(source_dir):
                    for filename in filenames:
                        src_file = join(dirpath, filename)
                        dst_file = join(target_dir, os.path.relpath(src_file, source_dir))
                        with open(src_file) as fp:
                            contents = fp.read()
                        old_line_count = len(contents.split('\n'))
                        if filename.endswith('.java'):
                            # Apply package renamings to both the file path
                            # and the file contents.
                            for old_name, new_name in package_renamings.items():
                                old_name_as_dir = old_name.replace('.', os.sep)
                                if old_name_as_dir in src_file:
                                    new_name_as_dir = new_name.replace('.', os.sep)
                                    dst = src_file.replace(old_name_as_dir, new_name_as_dir)
                                    dst_file = join(target_dir, os.path.relpath(dst, source_dir))
                                contents = contents.replace(old_name, new_name)

                            # Swap license headers etc.
                            for old_line, new_line in replacements.items():
                                contents = contents.replace(old_line, new_line)

                            match = java_package_re.search(contents)
                            if not match:
                                mx.abort('Could not find package declaration in {}'.format(src_file))
                            java_package = match.group('package')
                            if any(ex in java_package for ex in info.excludes):
                                mx.log('  excluding ' + filename)
                                continue

                            # Replacements must not change the file's line
                            # count; pad before the package declaration if
                            # they shrank it.
                            new_line_count = len(contents.split('\n'))
                            if new_line_count > old_line_count:
                                mx.abort('Pattern replacement caused line count to grow from {} to {} in {}'.format(old_line_count, new_line_count, src_file))
                            else:
                                if new_line_count < old_line_count:
                                    contents = contents.replace('\npackage ', '\n' * (old_line_count - new_line_count) + '\npackage ')
                            new_line_count = len(contents.split('\n'))
                            if new_line_count != old_line_count:
                                mx.abort('Unable to correct line count for {}'.format(src_file))
                            for forbidden in blacklist:
                                if forbidden in contents:
                                    mx.abort('Found blacklisted pattern \'{}\' in {}'.format(forbidden, src_file))
                        dst_dir = os.path.dirname(dst_file)
                        if not exists(dst_dir):
                            os.makedirs(dst_dir)
                        if first_file:
                            mx.log('  copying: ' + source_dir)
                            mx.log('       to: ' + target_dir)
                            # Test projects and annotation processors are
                            # excluded from the module build (tests are still
                            # tracked for JtregGraalUnit.gmk).
                            if p.testProject or p.definedAnnotationProcessors:
                                to_exclude = p.name
                                for old_name, new_name in package_renamings.items():
                                    if to_exclude.startswith(old_name):
                                        sfx = '' if to_exclude == old_name else to_exclude[len(old_name):]
                                        to_exclude = new_name + sfx
                                        break
                                jdk_internal_vm_compiler_EXCLUDES.add(to_exclude)
                                if p.testProject:
                                    jdk_internal_vm_compiler_test_SRC.add(to_exclude)
                            first_file = False
                        with open(dst_file, 'w') as fp:
                            fp.write(contents)

    def replace_lines(filename, begin_lines, end_line, replace_lines, old_line_check, preserve_indent=False, append_mode=False):
        # Rewrite the section of `filename` delimited by the (sequential)
        # `begin_lines` markers and `end_line`: replace its body with
        # `replace_lines`, or — in append_mode — keep the body and append
        # `replace_lines` right after `end_line`.  `old_line_check`
        # validates each replaced line; `preserve_indent` reuses the
        # original first line's indentation.
        mx.log('Updating ' + filename + '...')
        old_lines = []
        new_lines = []
        with open(filename) as fp:
            # Copy everything up to and including each begin marker.
            for begin_line in begin_lines:
                line = fp.readline()
                while line:
                    stripped_line = line.strip()
                    if stripped_line == begin_line:
                        new_lines.append(line)
                        break
                    new_lines.append(line)
                    line = fp.readline()
                assert line, begin_line + ' not found'

            lines = fp.readlines()
            line_in_def = True

            indent = 0
            if preserve_indent:
                line = lines[0]
                lstripped_line = line.lstrip()
                indent = len(line) - len(lstripped_line)

            if not append_mode:
                for replace in replace_lines:
                    new_lines.append(' ' * indent + replace)

            for line in lines:
                stripped_line = line.strip()
                if line_in_def:
                    if stripped_line == end_line:
                        line_in_def = False
                        new_lines.append(line)
                    else:
                        old_line_check(line)
                        if append_mode:
                            new_lines.append(line)
                    if append_mode and not line_in_def:
                        # reach end line and append new lines
                        for replace in replace_lines:
                            new_lines.append(replace)
                else:
                    new_lines.append(line)
        with open(filename, 'w') as fp:
            for line in new_lines:
                fp.write(line)
        return old_lines

    def single_column_with_continuation(line):
        # Sanity check for makefile list entries: '<value> \'.
        parts = line.split()
        assert len(parts) == 2 and parts[1] == '\\', line

    # Update jdk.internal.vm.compiler.EXCLUDES in make/CompileJavaModules.gmk
    # to exclude all test, benchmark and annotation processor packages.
    CompileJavaModules_gmk = join(jdkrepo, 'make', 'CompileJavaModules.gmk')  # pylint: disable=invalid-name
    new_lines = []
    for pkg in sorted(jdk_internal_vm_compiler_EXCLUDES):
        new_lines.append(pkg + ' \\\n')
    begin_lines = ['jdk.internal.vm.compiler_EXCLUDES += \\']
    end_line = '#'
    old_line_check = single_column_with_continuation
    replace_lines(CompileJavaModules_gmk, begin_lines, end_line, new_lines, old_line_check, preserve_indent=True)

    if args.version == 11:
        # add aot exclude
        out = run_output(['grep', 'jdk.aot_EXCLUDES', CompileJavaModules_gmk], cwd=jdkrepo)
        if out:
            # replace existing exclude setting
            begin_lines = ['jdk.aot_EXCLUDES += \\']
            end_line = '#'
            new_lines = ['jdk.tools.jaotc.test \\\n']
            replace_lines(CompileJavaModules_gmk, begin_lines, end_line, new_lines, old_line_check, preserve_indent=True)
        else:
            # append exclude setting after jdk.internal.vm.compiler_EXCLUDES
            new_lines = ['\n', 'jdk.aot_EXCLUDES += \\\n', ' jdk.tools.jaotc.test \\\n', ' #\n', '\n']  # indent is inlined
            replace_lines(CompileJavaModules_gmk, begin_lines, end_line, new_lines, old_line_check, preserve_indent=True, append_mode=True)

    # Update 'SRC' in the 'Compile graalunit tests' section of make/test/JtregGraalUnit.gmk
    # to include all test packages.
    JtregGraalUnit_gmk = join(jdkrepo, 'make', 'test', 'JtregGraalUnit.gmk')  # pylint: disable=invalid-name
    new_lines = []
    # These are not jtreg-runnable test sources; keep them out of SRC.
    jdk_internal_vm_compiler_test_SRC.discard('jdk.tools.jaotc.test')
    jdk_internal_vm_compiler_test_SRC.discard('org.graalvm.compiler.microbenchmarks')
    jdk_internal_vm_compiler_test_SRC.discard('org.graalvm.compiler.virtual.bench')
    jdk_internal_vm_compiler_test_SRC.discard('org.graalvm.micro.benchmarks')
    for pkg in sorted(jdk_internal_vm_compiler_test_SRC):
        new_lines.append('$(SRC_DIR)/' + pkg + '/src \\\n')
    if args.version == 11:
        begin_lines = ['### Compile and build graalunit tests', 'SRC := \\']
    else:
        begin_lines = ['### Compile graalunit tests', 'SRC := \\']
    end_line = ', \\'
    old_line_check = single_column_with_continuation
    replace_lines(JtregGraalUnit_gmk, begin_lines, end_line, new_lines, old_line_check, preserve_indent=True)

    overwritten = ''
    if not git_repo:
        # Mercurial repo: stage added/removed files and detect whether this
        # update overwrites local modifications made since the last
        # "Update Graal" commit.
        mx.log('Adding new files to HG...')
        m_src_dirs = []
        for m in graal_modules:
            m_src_dirs.append(join('src', m.name))
        out = run_output(['hg', 'log', '-r', 'last(keyword("Update Graal"))', '--template', '{rev}'] + m_src_dirs, cwd=jdkrepo)
        last_graal_update = out.strip()

        for m in graal_modules:
            m_src_dir = join('src', m.name)
            if last_graal_update:
                overwritten += run_output(['hg', 'diff', '-r', last_graal_update, '-r', 'tip', m_src_dir], cwd=jdkrepo)
            mx.run(['hg', 'add', m_src_dir], cwd=jdkrepo)
        mx.log('Removing old files from HG...')
        for m in graal_modules:
            m_src_dir = join('src', m.name)
            out = run_output(['hg', 'status', '-dn', m_src_dir], cwd=jdkrepo)
            if out:
                mx.run(['hg', 'rm'] + out.split(), cwd=jdkrepo)

    # Log the Graal changes merged since the most recent JDK-* tag of the
    # Graal repo into a changes file for the commit message.
    out = run_output(['git', 'tag', '-l', 'JDK-*'], cwd=mx_compiler._suite.vc_dir)
    last_jdk_tag = sorted(out.split(), reverse=True)[0]

    pretty = args.pretty or 'format:%h %ad %>(20) %an %s'
    out = run_output(['git', '--no-pager', 'log', '--merges', '--abbrev-commit', '--pretty=' + pretty, '--first-parent', '-r', last_jdk_tag + '..HEAD'] + copied_source_dirs, cwd=mx_compiler._suite.vc_dir)
    changes_file = 'changes-since-{}.txt'.format(last_jdk_tag)
    with open(changes_file, 'w') as fp:
        fp.write(out)
    mx.log('Saved changes since {} to {}'.format(last_jdk_tag, os.path.abspath(changes_file)))
    if overwritten:
        overwritten_file = 'overwritten-diffs.txt'
        with open(overwritten_file, 'w') as fp:
            fp.write(overwritten)
        mx.warn('Overwritten changes detected in OpenJDK Graal! See diffs in ' + os.path.abspath(overwritten_file))
def run_shared_lib_test(args=None):
    """Build libpolyglot.so and smoke-test the Polyglot native (C) API.

    Builds the polyglot shared-library image via mx, writes a small embedded
    C program to a temp file, compiles it with clang against the image's
    headers/library, and runs it. The C program evaluates a Python squaring
    function through the poly_* API and checks the result; any failure makes
    the compiled program return non-zero, which aborts via mx.run.

    :param args: unused; kept for mx command-signature compatibility.
    """
    # Build the shared library image and its C headers.
    mx.run_mx([
        "--dynamicimports", "/substratevm,/vm", "build",
        "--force-deprecation-as-warning", "--dependencies",
        "GRAAL_MANAGEMENT,POLYGLOT_NATIVE_API_HEADERS,libpolyglot.so.image"
    ], nonZeroIsFatal=True)
    vmdir = os.path.join(mx.suite("truffle").dir, "..", "vm")
    # Image output directory, e.g. vm/mxbuild/linux-amd64/libpolyglot.so.image;
    # used both as include path (-I) and library path (-L / LD_LIBRARY_PATH).
    svm_lib_path = os.path.join(vmdir, "mxbuild",
                                "-".join([mx.get_os(), mx.get_arch()]),
                                "libpolyglot.so.image")
    fd = name = progname = None
    try:
        fd, name = tempfile.mkstemp(suffix='.c')
        # NOTE(review): os.write with a str argument only works on Python 2;
        # the newer variant of this function uses a bytes literal — confirm
        # which interpreter runs this script.
        os.write(
            fd, """
#include "stdio.h"
#include "polyglot_api.h"

#define assert_ok(msg, f) { if (!(f)) { \\
 const poly_extended_error_info* error_info; \\
 poly_get_last_error_info(isolate_thread, &error_info); \\
 fprintf(stderr, "%s\\n", error_info->error_message); \\
 return fprintf(stderr, "%s\\n", msg); } } while (0)

poly_isolate global_isolate;
poly_thread isolate_thread;
poly_engine engine;
poly_context context;

static poly_status create_context() {
    poly_status status;

    if (poly_attach_thread(global_isolate, &isolate_thread)) {
        return poly_generic_failure;
    }

    poly_engine_builder engine_builder;
    status = poly_create_engine_builder(isolate_thread, &engine_builder);
    if (status != poly_ok) {
        return status;
    }
    status = poly_engine_builder_build(isolate_thread, engine_builder, &engine);
    if (status != poly_ok) {
        return status;
    }
    poly_context_builder builder;
    status = poly_create_context_builder(isolate_thread, NULL, 0, &builder);
    if (status != poly_ok) {
        return status;
    }
    status = poly_context_builder_engine(isolate_thread, builder, engine);
    if (status != poly_ok) {
        return status;
    }
    status = poly_context_builder_option(isolate_thread, builder, "python.VerboseFlag", "true");
    if (status != poly_ok) {
        return status;
    }
    status = poly_context_builder_allow_io(isolate_thread, builder, true);
    if (status != poly_ok) {
        return status;
    }
    status = poly_context_builder_build(isolate_thread, builder, &context);
    if (status != poly_ok) {
        return status;
    }

    return poly_ok;
}

static poly_status tear_down_context() {
    poly_status status = poly_context_close(isolate_thread, context, true);
    if (status != poly_ok) {
        return status;
    }

    status = poly_engine_close(isolate_thread, engine, true);
    if (status != poly_ok) {
        return status;
    }

    if (poly_detach_thread(isolate_thread)) {
        return poly_ok;
    }

    return poly_ok;
}

static int test_basic_python_function() {
    assert_ok("Context creation failed.", create_context() == poly_ok);

    poly_value func;
    assert_ok("function eval failed", poly_context_eval(isolate_thread, context, "python", "test_func", "def test_func(x):\\n return x * x\\ntest_func", &func) == poly_ok);

    int32_t arg_value = 42;
    poly_value primitive_object;
    assert_ok("create argument failed", poly_create_int32(isolate_thread, context, arg_value, &primitive_object) == poly_ok);

    poly_value arg[1] = {primitive_object};
    poly_value value;
    assert_ok("invocation was unsuccessful", poly_value_execute(isolate_thread, func, arg, 1, &value) == poly_ok);

    int32_t result_value;
    poly_value_as_int32(isolate_thread, value, &result_value);

    assert_ok("value computation was incorrect", result_value == 42 * 42);
    assert_ok("Context tear down failed.", tear_down_context() == poly_ok);

    return 0;
}

int32_t main(int32_t argc, char **argv) {
    poly_isolate_params isolate_params = {};
    if (poly_create_isolate(&isolate_params, &global_isolate, &isolate_thread)) {
        return 1;
    }
    return test_basic_python_function();
}
""")
        os.close(fd)
        progname = os.path.join(SUITE.dir, "graalpython-embedded-tool")
        mx.log("".join([
            "Running ", "'clang", "-I%s" % svm_lib_path, "-L%s" % svm_lib_path,
            name, "-o", progname, "-lpolyglot"
        ]))
        mx.run([
            "clang", "-I%s" % svm_lib_path, "-L%s" % svm_lib_path, name,
            "-o%s" % progname, "-lpolyglot"
        ], nonZeroIsFatal=True)
        mx.log("Running " + progname + " with LD_LIBRARY_PATH " + svm_lib_path)
        # Diagnostic listings so CI logs show what was actually produced.
        mx.run(["ls", "-l", progname])
        mx.run(["ls", "-l", svm_lib_path])
        run_env = {
            "LD_LIBRARY_PATH": svm_lib_path,
            "GRAAL_PYTHONHOME": os.environ["GRAAL_PYTHONHOME"]
        }
        print(run_env)
        mx.run([progname], env=run_env)
    finally:
        # Best-effort cleanup: the binary, the temp fd, and the temp source.
        try:
            os.unlink(progname)
        except:
            pass
        try:
            os.close(fd)
        except:
            pass
        try:
            os.unlink(name)
        except:
            pass
def run_shared_lib_test(args=None):
    """Build the Python shared library and smoke-test the Polyglot C API.

    Like the other variant of this test, but builds via python_so(), locates
    the polyglot library relative to the produced launcher, and conditionally
    enables the managed (sandboxed) LLVM configuration: when "sandboxed" is in
    `args`, the generated C source sets the "llvm.managed" context option via
    a C preprocessor `#if`.

    :param args: optional list of flags; only "sandboxed" is inspected.
    """
    if args is None:
        args = []
    launcher = python_so(args)
    # The polyglot library lives next to the launcher's jre/lib directory.
    svm_lib_path = os.path.abspath(
        os.path.join(launcher, "..", "..", "jre", "lib", "polyglot"))
    fd = name = progname = None
    try:
        fd, name = tempfile.mkstemp(suffix='.c')
        # The literal is %-formatted below, hence %%s for a literal %s in C.
        os.write(
            fd, b"""
#include "stdio.h"
#include "polyglot_api.h"

#define assert_ok(msg, f) { if (!(f)) { \\
 const poly_extended_error_info* error_info; \\
 poly_get_last_error_info(isolate_thread, &error_info); \\
 fprintf(stderr, "%%s\\n", error_info->error_message); \\
 return fprintf(stderr, "%%s\\n", msg); } } while (0)

poly_isolate global_isolate;
poly_thread isolate_thread;
poly_engine engine;
poly_context context;

static poly_status create_context() {
    poly_status status;

    if (poly_attach_thread(global_isolate, &isolate_thread)) {
        return poly_generic_failure;
    }

    poly_engine_builder engine_builder;
    status = poly_create_engine_builder(isolate_thread, &engine_builder);
    if (status != poly_ok) {
        return status;
    }
    status = poly_engine_builder_build(isolate_thread, engine_builder, &engine);
    if (status != poly_ok) {
        return status;
    }
    poly_context_builder builder;
    status = poly_create_context_builder(isolate_thread, NULL, 0, &builder);
    if (status != poly_ok) {
        return status;
    }
    status = poly_context_builder_engine(isolate_thread, builder, engine);
    if (status != poly_ok) {
        return status;
    }
    status = poly_context_builder_option(isolate_thread, builder, "python.VerboseFlag", "true");
    if (status != poly_ok) {
        return status;
    }
#if %s
    status = poly_context_builder_option(isolate_thread, builder, "llvm.managed", "true");
    if (status != poly_ok) {
        return status;
    }
#endif
    status = poly_context_builder_allow_io(isolate_thread, builder, true);
    if (status != poly_ok) {
        return status;
    }
    status = poly_context_builder_build(isolate_thread, builder, &context);
    if (status != poly_ok) {
        return status;
    }

    return poly_ok;
}

static poly_status tear_down_context() {
    poly_status status = poly_context_close(isolate_thread, context, true);
    if (status != poly_ok) {
        return status;
    }

    status = poly_engine_close(isolate_thread, engine, true);
    if (status != poly_ok) {
        return status;
    }

    if (poly_detach_thread(isolate_thread)) {
        return poly_ok;
    }

    return poly_ok;
}

static int test_basic_python_function() {
    assert_ok("Context creation failed.", create_context() == poly_ok);

    poly_value func;
    assert_ok("function eval failed", poly_context_eval(isolate_thread, context, "python", "test_func", "def test_func(x):\\n return x * x\\ntest_func", &func) == poly_ok);

    int32_t arg_value = 42;
    poly_value primitive_object;
    assert_ok("create argument failed", poly_create_int32(isolate_thread, context, arg_value, &primitive_object) == poly_ok);

    poly_value arg[1] = {primitive_object};
    poly_value value;
    assert_ok("invocation was unsuccessful", poly_value_execute(isolate_thread, func, arg, 1, &value) == poly_ok);

    int32_t result_value;
    poly_value_as_int32(isolate_thread, value, &result_value);

    assert_ok("value computation was incorrect", result_value == 42 * 42);
    assert_ok("Context tear down failed.", tear_down_context() == poly_ok);

    return 0;
}

int32_t main(int32_t argc, char **argv) {
    poly_isolate_params isolate_params = {};
    if (poly_create_isolate(&isolate_params, &global_isolate, &isolate_thread)) {
        return 1;
    }
    return test_basic_python_function();
}
""" % (b"1" if "sandboxed" in args else b"0"))
        os.close(fd)
        progname = os.path.join(SUITE.dir, "graalpython-embedded-tool")
        mx.log("".join([
            "Running ", "'clang", "-I%s" % svm_lib_path, "-L%s" % svm_lib_path,
            name, "-o", progname, "-lpolyglot"
        ]))
        mx.run([
            "clang", "-I%s" % svm_lib_path, "-L%s" % svm_lib_path, name,
            "-o%s" % progname, "-lpolyglot"
        ], nonZeroIsFatal=True)
        mx.log("Running " + progname + " with LD_LIBRARY_PATH " + svm_lib_path)
        # Diagnostic listings so CI logs show what was actually produced.
        mx.run(["ls", "-l", progname])
        mx.run(["ls", "-l", svm_lib_path])
        run_env = {
            "LD_LIBRARY_PATH": svm_lib_path,
            "GRAAL_PYTHONHOME": os.environ["GRAAL_PYTHONHOME"]
        }
        mx.log(repr(run_env))
        mx.run([progname], env=run_env)
    finally:
        # Best-effort cleanup: the binary, the temp fd, and the temp source.
        try:
            os.unlink(progname)
        except:
            pass
        try:
            os.close(fd)
        except:
            pass
        try:
            os.unlink(name)
        except:
            pass
def python3_unittests(args):
    # Run the suite's CPython-based unittest driver under python3,
    # forwarding any extra command-line arguments verbatim.
    base_cmd = [
        "python3",
        "graalpython/com.oracle.graal.python.test/src/python_unittests.py",
        "-v",
    ]
    mx.run(base_cmd + args)
def python3_unittests(args):
    """run the cPython stdlib unittests"""
    script = "graalpython/com.oracle.graal.python.test/src/python_unittests.py"
    # Delegate to the driver script, passing caller-supplied args through.
    mx.run(["python3", script, "-v"] + args)
def jdkartifactstats(args):
    """show stats about JDK deployed Graal artifacts"""
    # Maps a category name ('Graal', 'JVMCI', ...) to the list of jar paths
    # classified under it.
    artifacts = {}
    jdkDir = get_jvmci_jdk().home

    def _getDeployedJars():
        # Yields the deployed jars: on pre-9 JVMCI JDKs by scanning jre/lib,
        # otherwise from the configured jdkDeployedDists.
        if JVMCI_VERSION < 9:
            for root, _, filenames in os.walk(join(jdkDir, 'jre', 'lib')):
                for f in filenames:
                    # Skip our own temporary '.stripped.jar' artifacts.
                    if f.endswith('.jar') and not f.endswith('.stripped.jar'):
                        yield join(root, f)
        else:
            for jdkDist in jdkDeployedDists:
                dist = jdkDist.dist()
                if isinstance(jdkDist, JvmciJDKDeployedDist):
                    yield dist.path

    # Classify each deployed jar by substrings of its file name.
    for jar in _getDeployedJars():
        f = basename(jar)
        if 'truffle' in f:
            if 'enterprise' in f:
                artifacts.setdefault('GraalEnterpriseTruffle', []).append(jar)
            else:
                artifacts.setdefault('GraalTruffle', []).append(jar)
        elif 'enterprise' in f:
            artifacts.setdefault('GraalEnterprise', []).append(jar)
        elif 'jvmci' in f:
            artifacts.setdefault('JVMCI', []).append(jar)
        elif 'graal' in f:
            artifacts.setdefault('Graal', []).append(jar)
        else:
            mx.logv('ignored: ' + jar)

    # NOTE: this block is Python 2 (print statements, dict.viewkeys).
    print '{:>10} {:>10} {:>10} {}'.format('All', 'NoVars', 'None', 'Jar')
    for category in sorted(artifacts.viewkeys()):
        jars = artifacts[category]
        if jars:
            totals = (0, 0, 0)
            print
            for j in jars:
                gSize = os.path.getsize(j)
                stripped = j[:-len('.jar')] + '.stripped.jar'
                # Repack with local variable (type) tables stripped to measure
                # the size contribution of debug metadata.
                mx.run([
                    mx.get_jdk().pack200, '--repack', '--quiet',
                    '-J-Djava.util.logging.config.file=',
                    '-DLocalVariableTypeTable=strip',
                    '-DLocalVariableTable=strip', stripped, j
                ])
                gLinesSourceSize = os.path.getsize(stripped)
                # Repack again with -G (strip all debug attributes).
                mx.run([
                    mx.get_jdk().pack200, '--repack', '--quiet',
                    '-J-Djava.util.logging.config.file=', '-G', stripped, j
                ])
                gNoneSize = os.path.getsize(stripped)
                os.remove(stripped)
                print '{:10,} {:10,} {:10,} {}:{}'.format(
                    gSize, gLinesSourceSize, gNoneSize, category, basename(j))
                t1, t2, t3 = totals
                totals = (t1 + gSize, t2 + gLinesSourceSize, t3 + gNoneSize)
            t1, t2, t3 = totals
            print '{:10,} {:10,} {:10,} {}'.format(t1, t2, t3, category)

    # Also report the size of the JVM shared library itself.
    jvmLib = join(jdkDir, relativeVmLibDirInJdk(), get_vm(),
                  mx.add_lib_suffix(mx.add_lib_prefix('jvm')))
    print
    if exists(jvmLib):
        print '{:10,} {}'.format(os.path.getsize(jvmLib), jvmLib)
    else:
        print '{:>10} {}'.format('<missing>', jvmLib)
def python_build_watch(args):
    """
    Watch the suite and on any changes to .class, .jar, .h, or .c files rebuild.
    By default, rebuilds only the archives and non-Java projects.
    """
    parser = ArgumentParser(prog='mx python-build-watch')
    parser.add_argument('--full', action='store_true',
                        help='Run a full mx build', required=False)
    parser.add_argument('--graalvm', action='store_true',
                        help='Build a graalvm', required=False)
    parser.add_argument(
        '--no-java', action='store_true',
        help='Build only archives and native projects [default]',
        required=False)
    args = parser.parse_args(args)
    if sum([args.full, args.graalvm, args.no_java]) > 1:
        mx.abort("Only one of --full, --graalvm, --no-java can be specified")
    # Select which file patterns inotifywait should ignore for each mode.
    if args.full:
        # suffixes = [".c", ".h", ".class", ".jar", ".java"]
        excludes = [".*\\.py$"]
    elif args.graalvm:
        # suffixes = [".c", ".h", ".class", ".jar", ".java", ".py"]
        excludes = ["mx_.*\\.py$"]
    else:
        # suffixes = [".c", ".h", ".class", ".jar"]
        excludes = [".*\\.py$", ".*\\.java$"]

    # inotifywait blocks until a matching filesystem event and prints the
    # affected file name (--format=%f). The .git directory is excluded via
    # the @path syntax.
    cmd = [
        "inotifywait", "-q", "-e", "close_write,moved_to", "-r", "--format=%f"
    ]
    for e in excludes:
        cmd += ["--exclude", e]
    cmd += ["@%s" % os.path.join(SUITE.dir, ".git"), SUITE.dir]
    # A fully quiet variant used while draining follow-up events below.
    cmd_qq = cmd[:]
    cmd_qq[1] = "-qq"
    was_quiet = mx.get_opts().quiet

    while True:
        out = mx.OutputCapture()
        if mx.run(cmd, out=out, nonZeroIsFatal=False) != 0:
            continue
        changed_file = out.data.strip()
        mx.logv(changed_file)
        if any(
                changed_file.endswith(ext)
                for ext in [".c", ".h", ".class", ".jar"]):
            if not mx.get_opts().quiet:
                sys.stdout.write("Build needed ")
                sys.stdout.flush()
            while True:
                # re-run this until it times out, which we'll interpret as quiet
                # time
                if not mx.get_opts().quiet:
                    sys.stdout.write(".")
                    sys.stdout.flush()
                # Temporarily silence mx while polling; always restore.
                mx.get_opts().quiet = True
                try:
                    retcode = mx.run(cmd_qq, timeout=3, nonZeroIsFatal=False)
                finally:
                    mx.get_opts().quiet = was_quiet
                if retcode == mx.ERROR_TIMEOUT:
                    if not mx.get_opts().quiet:
                        sys.stdout.write("\n")
                    break
            mx.log("Building.")
            if args.full:
                mx.command_function("build")()
            elif args.graalvm:
                mx.log(python_gvm())
            else:
                nativebuild([])
            mx.log("Build done.")
def jlink_new_jdk(jdk, dst_jdk_dir, module_dists, root_module_names=None,
                  missing_export_target_action='create',
                  with_source=lambda x: True, vendor_info=None,
                  dedup_legal_notices=True):
    """
    Uses jlink from `jdk` to create a new JDK image in `dst_jdk_dir` with
    `module_dists` and their dependencies added to the JDK image, replacing any
    existing modules of the same name.

    :param JDKConfig jdk: source JDK
    :param str dst_jdk_dir: path to use for the jlink --output option
    :param list module_dists: list of distributions defining modules
    :param list root_module_names: list of strings naming the module root set
               for the new JDK image. The named modules must either be in
               `module_dists` or in `jdk`. If None, then the root set will be
               all the modules in `module_dists` and `jdk`.
    :param str missing_export_target_action: the action to perform for a
               qualified export target that is not present in `module_dists`
               and does not have a hash stored in java.base. The choices are:
                 "create" - an empty module is created
                  "error" - raise an error
                     None - do nothing
    :param lambda with_source: returns True if the sources of a module
               distribution must be included in the new JDK
    :param dict vendor_info: values for the jlink vendor options added by
               JDK-8232080
    """
    assert callable(with_source)

    # Sanity-check that `jdk` is a regular modular JDK we can link from.
    if jdk.javaCompliance < '9':
        mx.abort('Cannot derive a new JDK from ' + jdk.home +
                 ' with jlink since it is not JDK 9 or later')

    exploded_java_base_module = join(jdk.home, 'modules', 'java.base')
    if exists(exploded_java_base_module):
        mx.abort(
            'Cannot derive a new JDK from ' + jdk.home +
            ' since it appears to be a developer build with exploded modules')
    jimage = join(jdk.home, 'lib', 'modules')
    jmods_dir = join(jdk.home, 'jmods')
    if not isfile(jimage):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' +
                 jimage + ' is missing or is not an ordinary file')
    if not isdir(jmods_dir):
        mx.abort('Cannot derive a new JDK from ' + jdk.home + ' since ' +
                 jmods_dir + ' is missing or is not a directory')

    jdk_modules = {jmd.name: jmd for jmd in jdk.get_modules()}
    modules = [as_java_module(dist, jdk) for dist in module_dists]
    all_module_names = frozenset(
        list(jdk_modules.keys()) + [m.name for m in modules])

    # Read hashes stored in java.base (the only module in the JDK where hashes are stored)
    out = mx.LinesOutputCapture()
    mx.run([
        jdk.exe_path('jmod'), 'describe',
        jdk_modules['java.base'].get_jmod_path()
    ], out=out)
    lines = out.lines
    hashes = {}
    for line in lines:
        if line.startswith('hashes'):
            parts = line.split()
            assert len(
                parts
            ) == 4, 'expected hashes line to have 4 fields, got {} fields: {}'.format(
                len(parts), line)
            _, module_name, algorithm, hash_value = parts
            hashes[module_name] = (algorithm, hash_value)

    # Scratch area; removed in the finally clause unless running verbose.
    build_dir = mx.ensure_dir_exists(join(dst_jdk_dir + ".build"))
    try:
        # Handle targets of qualified exports that are not present in `modules`
        target_requires = {}
        for jmd in modules:
            for targets in jmd.exports.values():
                for target in targets:
                    if target not in all_module_names and target not in hashes:
                        target_requires.setdefault(target, set()).add(jmd.name)
        if target_requires and missing_export_target_action is not None:
            if missing_export_target_action == 'error':
                mx.abort(
                    'Target(s) of qualified exports cannot be resolved: ' +
                    '.'.join(target_requires.keys()))
            assert missing_export_target_action == 'create', 'invalid value for missing_export_target_action: ' + str(
                missing_export_target_action)

            # Synthesize an empty module (just a module-info) for each missing
            # export target so jlink can resolve the qualified exports.
            extra_modules = []
            for name, requires in target_requires.items():
                module_jar = join(build_dir, name + '.jar')
                jmd = JavaModuleDescriptor(
                    name, {},
                    requires={module: [] for module in requires},
                    uses=set(),
                    provides={},
                    jarpath=module_jar)
                extra_modules.append(jmd)
                module_build_dir = mx.ensure_dir_exists(join(build_dir, name))
                module_info_java = join(module_build_dir, 'module-info.java')
                module_info_class = join(module_build_dir, 'module-info.class')
                with open(module_info_java, 'w') as fp:
                    print(jmd.as_module_info(), file=fp)
                mx.run([
                    jdk.javac, '-d', module_build_dir,
                    '--limit-modules=java.base,' + ','.join(jmd.requires.keys()),
                    '--module-path=' + os.pathsep.join(
                        (m.jarpath for m in modules)), module_info_java
                ])
                with ZipFile(module_jar, 'w') as zf:
                    zf.write(module_info_class, basename(module_info_class))
                if exists(jmd.get_jmod_path()):
                    os.remove(jmd.get_jmod_path())
                # jmod lives next to javac in the JDK's bin directory.
                mx.run([
                    jdk.javac.replace('javac', 'jmod'), 'create',
                    '--class-path=' + module_build_dir, jmd.get_jmod_path()
                ])

            modules.extend(extra_modules)
            all_module_names = frozenset(
                list(jdk_modules.keys()) + [m.name for m in modules])

        # Extract src.zip from source JDK
        jdk_src_zip = join(jdk.home, 'lib', 'src.zip')
        dst_src_zip_contents = {}
        if isfile(jdk_src_zip):
            mx.logv('[Extracting ' + jdk_src_zip + ']')
            with ZipFile(jdk_src_zip, 'r') as zf:
                for name in zf.namelist():
                    if not name.endswith('/'):
                        dst_src_zip_contents[name] = zf.read(name)
        else:
            mx.warn("'{}' does not exist or is not a file".format(jdk_src_zip))

        # Edit lib/security/default.policy in java.base
        patched_java_base = join(build_dir, 'java.base.jmod')
        with open(join(jmods_dir, 'java.base.jmod'), 'rb') as src_f, open(
                patched_java_base, 'wb') as dst_f:
            # A jmod file is a 4-byte header followed by a zip archive.
            jmod_header = src_f.read(4)
            if len(jmod_header) != 4 or jmod_header != b'JM\x01\x00':
                raise mx.abort("Unexpected jmod header: " +
                               b2a_hex(jmod_header).decode('ascii'))
            dst_f.write(jmod_header)
            policy_result = 'not found'
            with ZipFile(src_f, 'r') as src_zip, ZipFile(
                    dst_f, 'w', src_zip.compression) as dst_zip:
                for i in src_zip.infolist():
                    if i.filename[-1] == '/':
                        continue
                    src_member = src_zip.read(i)
                    if i.filename == 'lib/security/default.policy':
                        # Grant AllPermission to the enterprise Graal module
                        # unless the policy already contains the grant.
                        if 'grant codeBase "jrt:/com.oracle.graal.graal_enterprise"'.encode(
                                'utf-8') in src_member:
                            policy_result = 'unmodified'
                        else:
                            policy_result = 'modified'
                            src_member += """
grant codeBase "jrt:/com.oracle.graal.graal_enterprise" {
    permission java.security.AllPermission;
};
""".encode('utf-8')
                    dst_zip.writestr(i, src_member)
            if policy_result == 'not found':
                raise mx.abort(
                    "Couldn't find `lib/security/default.policy` in " +
                    join(jmods_dir, 'java.base.jmod'))

        for jmd in modules:
            # Remove existing sources for all the modules that we include
            dst_src_zip_contents = {
                key: dst_src_zip_contents[key]
                for key in dst_src_zip_contents if not key.startswith(jmd.name)
            }

            if with_source(jmd.dist):
                # Add the sources that we can share.
                # Extract module sources
                jmd_src_zip = jmd.jarpath[0:-len('.jar')] + '.src.zip'
                if isfile(jmd_src_zip):
                    mx.logv('[Extracting ' + jmd_src_zip + ']')
                    with ZipFile(jmd_src_zip, 'r') as zf:
                        for name in zf.namelist():
                            if not name.endswith('/'):
                                dst_src_zip_contents[jmd.name + '/' +
                                                     name] = zf.read(name)

                # Add module-info.java to sources
                dst_src_zip_contents[
                    jmd.name + '/module-info.java'] = jmd.as_module_info(
                        extras_as_comments=False)

        # Now build the new JDK image with jlink
        jlink = [jdk.javac.replace('javac', 'jlink')]

        if jdk_enables_jvmci_by_default(jdk):
            # On JDK 9+, +EnableJVMCI forces jdk.internal.vm.ci to be in the root set
            jlink += ['-J-XX:-EnableJVMCI', '-J-XX:-UseJVMCICompiler']
        if root_module_names is not None:
            missing = frozenset(root_module_names) - all_module_names
            if missing:
                mx.abort('Invalid module(s): {}.\nAvailable modules: {}'.format(
                    ','.join(missing), ','.join(sorted(all_module_names))))
            jlink.append('--add-modules=' + ','.join(root_module_names))
        else:
            jlink.append('--add-modules=' + ','.join(sorted(all_module_names)))

        # Our modules (stripped if applicable) shadow the JDK's; the patched
        # java.base shadows the original one in jmods_dir.
        module_path = patched_java_base + os.pathsep + jmods_dir
        if modules:
            module_path = os.pathsep.join(
                (m.get_jmod_path(respect_stripping=True)
                 for m in modules)) + os.pathsep + module_path
        jlink.append('--module-path=' + module_path)
        jlink.append('--output=' + dst_jdk_dir)

        # These options are derived from how OpenJDK runs jlink to produce the final runtime image.
        jlink.extend([
            '-J-XX:+UseSerialGC', '-J-Xms32M', '-J-Xmx512M',
            '-J-XX:TieredStopAtLevel=1'
        ])
        jlink.append('-J-Dlink.debug=true')
        if dedup_legal_notices:
            jlink.append('--dedup-legal-notices=error-if-not-same-content')
        jlink.append('--keep-packaged-modules=' + join(dst_jdk_dir, 'jmods'))

        if jdk_has_new_jlink_options(jdk):
            if jdk_omits_warning_for_jlink_set_ThreadPriorityPolicy(jdk):
                thread_priority_policy_option = ' -XX:ThreadPriorityPolicy=1'
            else:
                mx.logv('[Creating JDK without -XX:ThreadPriorityPolicy=1]')
                thread_priority_policy_option = ''

            if jdk_supports_enablejvmciproduct(jdk):
                if any((m.name == 'jdk.internal.vm.compiler' for m in modules)):
                    jlink.append(
                        '--add-options=-XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCIProduct -XX:-UnlockExperimentalVMOptions'
                        + thread_priority_policy_option)
                else:
                    # Don't default to using JVMCI as JIT unless Graal is being updated in the image.
                    # This avoids unexpected issues with using the out-of-date Graal compiler in
                    # the JDK itself.
                    jlink.append(
                        '--add-options=-XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCIProduct -XX:-UseJVMCICompiler -XX:-UnlockExperimentalVMOptions'
                        + thread_priority_policy_option)
            else:
                mx.logv('[Creating JDK without -XX:+EnableJVMCIProduct]')
                if thread_priority_policy_option:
                    jlink.append('--add-options=' +
                                 thread_priority_policy_option.strip())

        if vendor_info is not None:
            for name, value in vendor_info.items():
                jlink.append('--' + name + '=' + value)

        # TODO: investigate the options below used by OpenJDK to see if they should be used:
        # --release-info: this allow extra properties to be written to the <jdk>/release file
        # --order-resources: specifies order of resources in generated lib/modules file.
        #     This is apparently not so important if a CDS archive is available.
        # --generate-jli-classes: pre-generates a set of java.lang.invoke classes.
        #     See https://github.com/openjdk/jdk/blob/master/make/GenerateLinkOptData.gmk
        mx.logv('[Creating JDK image in {}]'.format(dst_jdk_dir))
        mx.run(jlink)

        # Write the merged sources gathered above into the new image's src.zip.
        dst_src_zip = join(dst_jdk_dir, 'lib', 'src.zip')
        mx.logv('[Creating ' + dst_src_zip + ']')
        with ZipFile(dst_src_zip, 'w', compression=ZIP_DEFLATED,
                     allowZip64=True) as zf:
            for name, contents in sorted(dst_src_zip_contents.items()):
                zf.writestr(name, contents)

        mx.logv('[Copying static libraries]')
        lib_prefix = mx.add_lib_prefix('')
        lib_suffix = '.lib' if mx.is_windows() else '.a'
        lib_directory = join(jdk.home, 'lib')
        dst_lib_directory = join(dst_jdk_dir, 'lib')
        for f in os.listdir(lib_directory):
            if f.startswith(lib_prefix) and f.endswith(lib_suffix):
                lib_path = join(lib_directory, f)
                if isfile(lib_path):
                    shutil.copy2(lib_path, dst_lib_directory)

        # Build the list of modules whose classes might have annotations
        # to be processed by native-image (GR-15192).
        with open(join(dst_jdk_dir, 'lib', 'native-image-modules.list'),
                  'w') as fp:
            print(
                '# Modules whose classes might have annotations processed by native-image',
                file=fp)
            for m in modules:
                print(m.name, file=fp)
    finally:
        if not mx.get_opts().verbose:
            # Preserve build directory so that javac command can be re-executed
            # by cutting and pasting verbose output.
            shutil.rmtree(build_dir)

    # Create CDS archive (https://openjdk.java.net/jeps/341).
    out = mx.OutputCapture()
    mx.logv('[Creating CDS shared archive]')
    if mx.run([
            mx.exe_suffix(join(dst_jdk_dir, 'bin', 'java')), '-Xshare:dump',
            '-Xmx128M', '-Xms128M'
    ], out=out, err=out, nonZeroIsFatal=False) != 0:
        mx.log(out.data)
        mx.abort('Error generating CDS shared archive')
def run(self, inputFile, outputFile, flags): return mx.run([mx_sulong.findLLVMProgram('opt'), '-o', outputFile] + self.passes + [inputFile])
def _mxrun(args, cwd=_suite.dir, verbose=False, out=None, env=None):
    """Run *args* via mx.run, optionally echoing the command first;
    abort mx (with the exit code) when the command fails."""
    if verbose:
        mx.log('Running \'{}\''.format(' '.join(args)))
    exit_code = mx.run(args, nonZeroIsFatal=False, cwd=cwd, out=out, env=env)
    if exit_code:
        mx.abort(exit_code)
def run_java(self, args, out=None, err=None, cwd=None, nonZeroIsFatal=False): if '-version' in args: return super(NativeImageVM, self).run_java(args, out=out, err=err, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal) else: config = NativeImageVM.BenchmarkConfig() original_java_run_args = config.parse(args) executable, classpath_arguments, system_properties, image_run_args = NativeImageVM.extract_benchmark_arguments( original_java_run_args) # Agent configuration and/or HotSpot profiling needs_config = (config.config_dir is None) and config.needs_config if needs_config or self.hotspot_pgo: config_vm_args = [] profiling_vm_args = [] config_run_args = [] profiling_run_args = [] if needs_config: config.config_dir = mx.mkdtemp(suffix='config', prefix='native-image') config_vm_args += [ '-agentlib:native-image-agent=config-output-dir=' + str(config.config_dir) ] config_run_args += config.extra_agent_run_args if self.hotspot_pgo: profiling_vm_args += [ '-Dgraal.ProfilesCollectExperimental=true' ] config_run_args = [] profiling_run_args = config.extra_profile_run_args agent_run_args = config_vm_args + profiling_vm_args + original_java_run_args + config.extra_run_args + config_run_args + profiling_run_args mx.log( 'Running with HotSpot to get the configuration files and profiles. 
This could take a while:' ) super(NativeImageVM, self).run_java(agent_run_args, out=None, err=None, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal) base_image_build_args = [ os.path.join(mx_vm.graalvm_home(fatalIfMissing=True), 'bin', 'native-image') ] base_image_build_args += ['--no-fallback'] base_image_build_args += ['-J-ea', '-J-esa'] base_image_build_args += system_properties base_image_build_args += classpath_arguments executable_name = (executable[1] if executable[0] is '-jar' else executable[0]).lower() base_image_build_args += executable base_image_build_args += ['-H:Name=' + executable_name] if needs_config: base_image_build_args += [ '-H:ConfigurationFileDirectories=' + config.config_dir ] base_image_build_args += config.extra_image_build_arguments # PGO instrumentation i = 0 while i < self.pgo_instrumented_iterations: instrument_args = [ '--pgo-instrument' ] + [] if i is 0 and not self.hotspot_pgo else ['--pgo'] instrument_image_build_args = base_image_build_args + instrument_args mx.log('Building the instrumentation image with: ') mx.log(' ' + ' '.join([ pipes.quote(str(arg)) for arg in instrument_image_build_args ])) mx.run(instrument_image_build_args, out=None, err=None, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal) image_run_cmd = [ os.path.abspath(executable_name) ] + image_run_args + config.extra_run_args + config.extra_profile_run_args mx.log('Running the instrumented image with: ') mx.log( ' ' + ' '.join([pipes.quote(str(arg)) for arg in image_run_cmd])) mx.run(image_run_cmd, out=out, err=err, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal) i += 1 # Build the final image pgo_args = ['--pgo'] if self.pgo_instrumented_iterations > 0 or self.hotspot_pgo else [] final_image_args = base_image_build_args + pgo_args mx.log('Building the final image with: ') mx.log( ' ' + ' '.join([pipes.quote(str(arg)) for arg in final_image_args])) mx.run(final_image_args, out=None, err=None, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal) # Execute the benchmark image_run_cmd = 
[os.path.abspath(executable_name) ] + image_run_args + config.extra_run_args mx.log('Running the produced native executable with: ') mx.log(' ' + ' '.join([pipes.quote(str(arg)) for arg in image_run_cmd])) mx.run(image_run_cmd, out=out, err=err, cwd=cwd, nonZeroIsFatal=nonZeroIsFatal)
def _fastr_gate_runner(args, tasks):
    # Setup tasks: these only export environment variables that configure
    # FastR for the test tasks that run later in the same gate invocation.
    with mx_gate.Task('Setup no specials', tasks,
                      tags=[FastRGateTags.no_specials]) as t:
        if t:
            os.environ['FASTR_OPTION_UseSpecials'] = 'false'
    with mx_gate.Task('Setup no dsl cache', tasks,
                      tags=[FastRGateTags.no_dsl_cache]) as t:
        if t:
            os.environ['FASTR_OPTION_DSLCacheSizeFactor'] = '0'
    with mx_gate.Task('SetupLLVM', tasks, tags=[FastRGateTags.llvm]) as t:
        if t:
            os.environ['FASTR_RFFI'] = 'llvm'
    with mx_gate.Task('GCTorture1', tasks,
                      tags=[FastRGateTags.gc_torture1]) as t:
        if t:
            os.environ['FASTR_GCTORTURE'] = '1'
    with mx_gate.Task('GCTorture3', tasks,
                      tags=[FastRGateTags.gc_torture3]) as t:
        if t:
            os.environ['FASTR_GCTORTURE'] = '3'
    with mx_gate.Task('VerySlowAsserts', tasks,
                      tags=[FastRGateTags.very_slow_asserts]) as t:
        if t:
            os.environ['FASTR_TEST_VERY_SLOW_ASSERTS'] = 'true'

    '''
    The specific additional gates tasks provided by FastR.
    '''
    # Always-on diagnostics: record external software versions and the
    # provenance of the native libraries bundled with FastR.
    with mx_gate.Task('ExtSoftVersions', tasks,
                      tags=[mx_gate.Tags.always]) as t:
        if t:
            new_env = os.environ.copy()
            new_env['R_DEFAULT_PACKAGES'] = 'base'
            run_r(['-q', '-e', 'extSoftVersion()'], 'R', env=new_env)
    with mx_gate.Task('LibsInfo', tasks, tags=[mx_gate.Tags.always]) as t:
        if t:
            mx.log("Libraries captured in FASTR_HOME/lib:")
            lib_dir = os.path.join(_fastr_suite.dir, 'lib')
            for f in os.listdir(lib_dir):
                full_path = os.path.join(lib_dir, f)
                mx.run(['file', full_path], nonZeroIsFatal=False)
                mx.run(['objdump', '-s', '--section', '.comment', full_path],
                       nonZeroIsFatal=False)
                mx.log('------')

    # ---------------------------------
    # Style checks:

    # FastR has custom copyright check
    with mx_gate.Task('Copyright check', tasks,
                      tags=[mx_gate.Tags.style]) as t:
        if t:
            if mx.checkcopyrights(['--primary']) != 0:
                t.abort('copyright errors')

    # check that the expected test output file is up to date
    with mx_gate.Task('UnitTests: ExpectedTestOutput file check', tasks,
                      tags=[mx_gate.Tags.style]) as t:
        if t:
            mx_unittest.unittest([
                '-Dfastr.test.gen.expected=' + _test_srcdir(),
                '-Dfastr.test.check.expected=true'
            ] + _gate_unit_tests())

    # ----------------------------------
    # Basic tests:

    with mx_gate.Task(
            'UnitTests', tasks,
            tags=[FastRGateTags.basic_tests, FastRGateTags.unit_tests]) as t:
        if t:
            mx_unittest.unittest(_gate_noapps_unit_tests())
    with mx_gate.Task('Rembedded', tasks,
                      tags=[FastRGateTags.basic_tests]) as t:
        if t:
            if rembedtest([]) != 0:
                t.abort("Rembedded tests failed")

    # ----------------------------------
    # Package tests:

    with mx_gate.Task('Recommended load test', tasks,
                      tags=[FastRGateTags.recommended_load]) as t:
        if t:
            # Note: this is a convenience mx gate job for testing the loading of recommended packages
            # We also test the loading of recommended pkgs in the "graalvm-tests"
            if not os.path.exists(
                    os.path.join(_fastr_suite.dir, 'library', 'spatial')):
                mx.abort(
                    'Recommended packages seem to be not installed in FastR. Did you forget to build with FASTR_RELEASE=true?'
                )
            pkgs = [
                'codetools', 'MASS', 'boot', 'class', 'cluster', 'lattice',
                'nnet', 'spatial', 'Matrix', 'KernSmooth', 'foreign', 'nlme',
                'rpart', 'survival'
            ]
            # Creates code that looks like: require(codetools) && require(MASS) && ...
            require_stmts = ' && '.join(
                ['require(' + pkg + ')' for pkg in pkgs])
            # Exit status 42 signals that every package loaded successfully.
            test_load = 'if (!(' + require_stmts + ')) q(status=1) else q(status=42)'
            if run_r(['--vanilla', '-e', test_load], 'R',
                     nonZeroIsFatal=False) != 42:
                mx.abort("Loading of recommended packages failed")

    with mx_gate.Task('Internal pkg test', tasks,
                      tags=[FastRGateTags.internal_pkgs_test]) as t:
        if t:
            internal_pkg_tests()

    # CRAN packages are listed in files com.oracle.truffle.r.test.packages/gated0, gated1, ...
    # We loop over all such files and create gate task for each of them
    # See also documentation in FastRGateTags.cran_pkgs_tests
    for i in range(1, 1000):
        list_file = os.path.join(
            _fastr_suite.dir,
            'com.oracle.truffle.r.test.packages/gated' + str(i))
        if not os.path.exists(list_file):
            break
        with mx_gate.Task('CRAN pkg test: ' + str(i), tasks,
                          tags=[FastRGateTags.cran_pkgs_test + str(i)]) as t:
            if t:
                check_last = False if mx_gate.Task.tags is None else FastRGateTags.cran_pkgs_test_check_last in mx_gate.Task.tags  # pylint: disable=unsupported-membership-test
                if check_last:
                    # Sanity check: the gate configuration must cover every
                    # existing gatedN file; fail loudly if one is unaccounted.
                    next_file = os.path.join(
                        _fastr_suite.dir,
                        'com.oracle.truffle.r.test.packages/gated' +
                        str(i + 1))
                    if os.path.exists(next_file):
                        mx.abort(
                            "File %s exists, but the gate thinks that %s is the last file. Did you forget to update the gate configuration?"
                            % (next_file, list_file))
                cran_pkg_tests(list_file)
def graalpython_gate_runner(args, tasks):
    """Register and run the GraalPython CI gate tasks.

    Each ``with Task(...)`` block is tag-gated: the body runs only when the
    task was selected for this gate invocation (``task`` is truthy).

    :param args: gate arguments (unused here; part of the mx gate-runner contract)
    :param tasks: list of tasks that the Task context manager registers into
    """
    # Java (JUnit) unit tests.
    with Task('GraalPython JUnit', tasks, tags=[GraalPythonTags.junit]) as task:
        if task:
            punittest(['--verbose'])
    # Python-level unit tests.
    with Task('GraalPython Python tests', tasks, tags=[GraalPythonTags.unittest]) as task:
        if task:
            gate_unittests()
    with Task('GraalPython C extension tests', tasks, tags=[GraalPythonTags.cpyext]) as task:
        if task:
            # we deliberately added this to test the combination of Sulong and 'mx_unittest'
            unittest([
                '--regex',
                re.escape('com.oracle.graal.python.test.module.MemoryviewTest'),
                "-Dgraal.TraceTruffleCompilation=true"
            ])
            gate_unittests(subdir="cpyext/")
    # C extension tests under Sulong's "managed" LLVM configuration,
    # run through a nested mx invocation with the extra dynamic import.
    with Task('GraalPython C extension managed tests', tasks, tags=[GraalPythonTags.cpyext_managed]) as task:
        if task:
            mx.run_mx([
                "--dynamicimports", "sulong-managed", "python-gate-unittests",
                "--llvm.configuration=managed", "--subdir=cpyext", "--"
            ])
    # Same tests under the "sandboxed" LLVM configuration.
    with Task('GraalPython C extension sandboxed tests', tasks, tags=[GraalPythonTags.cpyext_sandboxed]) as task:
        if task:
            mx.run_mx([
                "--dynamicimports", "sulong-managed", "python-gate-unittests",
                "--llvm.configuration=sandboxed", "--subdir=cpyext", "--"
            ])
    # Python unit tests against a native (SVM) image; building the image via
    # python_svm(["-h"]) also returns its path.
    with Task('GraalPython Python tests on SVM', tasks, tags=[GraalPythonTags.svmunit]) as task:
        if task:
            svm_image_name = python_svm(["-h"])
            run_python_unittests(svm_image_name)
    with Task('GraalPython license header update', tasks, tags=[GraalPythonTags.license]) as task:
        if task:
            python_checkcopyrights([])
    with Task('GraalPython GraalVM shared-library build', tasks, tags=[GraalPythonTags.downstream, GraalPythonTags.graalvm]) as task:
        if task:
            run_shared_lib_test()
    # Smoke-test a full GraalVM build: run the image-magix.py benchmark on the
    # built SVM image and check its captured output for the expected pattern.
    with Task('GraalPython GraalVM build', tasks, tags=[GraalPythonTags.downstream, GraalPythonTags.graalvm]) as task:
        if task:
            svm_image = python_svm(["--version"])
            benchmark = os.path.join(PATH_MESO, "image-magix.py")
            out = mx.OutputCapture()
            mx.run([svm_image, benchmark], nonZeroIsFatal=True, out=mx.TeeOutputCapture(out))
            # Expected output line(s) of the benchmark; joined so further
            # lines can be appended to the list later.
            success = "\n".join([
                "[0, 0, 0, 0, 0, 0, 10, 10, 10, 0, 0, 10, 3, 10, 0, 0, 10, 10, 10, 0, 0, 0, 0, 0, 0]",
            ])
            if success not in out.data:
                mx.abort('Output from generated SVM image "' + svm_image + '" did not match success pattern:\n' + success)
def build_binary_pkgs(args_in, **kwargs):
    '''
    Builds binary packages of components that we cache for build speed-up.
    See the CI scripts for details.

    Creates, under {suite}/binary-packages, one tarball for the f2c binary and
    one for the pre-built recommended R packages, each versioned one higher
    than the version passed on the command line.

    :param args_in: command-line style argument list (see the parser below)
    :return: 0 on success
    '''
    # Local import keeps the function self-contained; stdlib only.
    from contextlib import redirect_stdout

    parser = ArgumentParser()
    parser.add_argument('--f2c-version', type=int, dest='f2c_version', required=True,
                        help='Current version of f2c, the tarball will use this + 1')
    parser.add_argument('--recommended-pkgs-version', default=0, type=int, dest='recommended_version',
                        help='Current version of recommended packages binary, the tarball will use this + 1')
    parser.add_argument('--recommended-pkgs-list', dest='recommended_list', required=True,
                        help='Comma separated list of recommended packages')
    args = parser.parse_args(args_in)

    os_name = platform.system().lower()
    dest_dir = os.path.join(_fastr_suite.dir, 'binary-packages')
    # Start from a clean output directory.
    shutil.rmtree(dest_dir, ignore_errors=True)
    mx.ensure_dir_exists(dest_dir)

    def _create_tarball(src_path):
        # Packs src_path into dest_dir/<basename>.tar.gz and logs its location.
        result_tarball = os.path.join(dest_dir, os.path.basename(src_path) + '.tar.gz')
        with tarfile.open(result_tarball, "w:gz") as tar:
            tar.add(src_path, arcname=os.path.basename(src_path))
        mx.log("Binary package created at: " + result_tarball)

    # F2C
    # creates binary-packages/f2c-binary-{version}-{osname}-amd64/f2c with contents of FASTR_HOME/f2c
    f2c_name = 'f2c-binary-' + str(args.f2c_version + 1) + '-' + os_name + '-amd64'
    f2c_path = os.path.join(dest_dir, f2c_name)
    shutil.copytree(os.path.join(_fastr_suite.dir, 'f2c'), os.path.join(f2c_path, 'f2c'))
    _create_tarball(f2c_path)

    # Recommended packages
    # creates binary-packages/fastr-recommended-pkgs-{version}-{osname}-amd64/fastr-recommended-pkgs
    pkgs_name = 'fastr-recommended-pkgs-' + str(args.recommended_version + 1) + '-' + os_name + '-amd64'
    pkgs_path = os.path.join(dest_dir, pkgs_name)
    pkgs_pkgs_path = os.path.join(pkgs_path, 'pkgs')
    mx.ensure_dir_exists(pkgs_pkgs_path)
    for pkg_name in args.recommended_list.split(','):
        shutil.copytree(os.path.join(_fastr_suite.dir, 'library', pkg_name),
                        os.path.join(pkgs_pkgs_path, pkg_name))
    # add file with API digest
    # redirect_stdout restores whatever stdout was active at entry; the previous
    # manual `sys.stdout = sys.__stdout__` swap would clobber an outer redirection.
    with open(os.path.join(pkgs_path, 'api-checksum.txt'), 'w') as f, redirect_stdout(f):
        pkgcache(['--print-api-checksum', '--vm', 'fastr'])
    _create_tarball(pkgs_path)

    mx.log("Contents of the " + dest_dir + " directory: ")
    mx.run(['ls', '-R', dest_dir])
    return 0
def execute_command(self, final_command=False): write_output = final_command or self.is_gate self.exit_code = mx.run(self.command, out=self.stdout(write_output), err=self.stderr(write_output), cwd=self.cwd, nonZeroIsFatal=False)
def llvm_tool(args=None, out=None, **kwargs): if len(args) < 1: mx.abort("usage: mx llvm-tool <llvm-tool> [args...]") llvm_program = findBundledLLVMProgram(args[0]) mx.run([llvm_program] + args[1:], out=out, **kwargs)