def truffle_language_ensure(language_flag, version=None, native_image_root=None, early_exit=False, extract=True, debug_gr_8964=False):
    """
    Ensures that we have a valid suite for the given language_flag, by downloading a binary if necessary
    and providing the suite distribution artifacts in the native-image directory hierarchy (via symlinks).

    :param language_flag: native-image language_flag whose truffle-language we want to use
    :param version: if not specified and no TRUFFLE_<LANG>_VERSION set latest binary deployed master revision gets downloaded
    :param native_image_root: the native_image_root directory where the artifacts get installed to
    :param early_exit: if True and the language subdir already exists, skip the suite import entirely
    :param extract: if False, only verify the language subdir exists and return the suite
    :return: language suite for the given language_flag
    """
    if not native_image_root:
        native_image_root = suite_native_image_root()

    # Allow a TRUFFLE_<LANG>_VERSION environment variable to pin the version.
    # (Fixed: `os.environ.has_key(...)` is Python-2-only; `in` works on both.)
    version_env_var = 'TRUFFLE_' + language_flag.upper() + '_VERSION'
    if not version and version_env_var in os.environ:
        version = os.environ[version_env_var]

    if language_flag not in flag_suitename_map:
        mx.abort('No truffle-language uses language_flag \'' + language_flag + '\'')

    language_dir = join('languages', language_flag)
    if early_exit and exists(join(native_image_root, language_dir)):
        mx.logv('Early exit mode: Language subdir \'' + language_flag + '\' exists. Skip suite.import_suite.')
        return None

    language_entry = flag_suitename_map[language_flag]

    language_suite_name = language_entry[0]
    # Optional 4th entry names a repo that hosts the suite in a subdirectory.
    language_repo_name = language_entry[3] if len(language_entry) > 3 else None

    urlinfos = [
        mx.SuiteImportURLInfo(
            mx_urlrewrites.rewriteurl('https://curio.ssw.jku.at/nexus/content/repositories/snapshots'),
            'binary',
            mx.vc_system('binary'))
    ]

    failure_warning = None
    if not version and not mx.suite(language_suite_name, fatalIfMissing=False):
        # If no specific version requested use binary import of last recently deployed master version
        repo_suite_name = language_repo_name if language_repo_name else language_suite_name
        repo_url = mx_urlrewrites.rewriteurl('https://github.com/graalvm/{0}.git'.format(repo_suite_name))
        version = mx.SuiteImport.resolve_git_branchref(repo_url, 'binary', abortOnError=False)
        if not version:
            # Defer the warning: only surface it if the import below also fails.
            failure_warning = 'Resolving \'binary\' against ' + repo_url + ' failed'

    language_suite = suite.import_suite(
        language_suite_name,
        version=version,
        urlinfos=urlinfos,
        kind=None,
        in_subdir=bool(language_repo_name)
    )

    if not language_suite:
        if failure_warning:
            mx.warn(failure_warning)
        mx.abort('Binary suite not found and no local copy of ' + language_suite_name + ' available.')

    if not extract:
        # Caller only wants the suite object; the artifacts must already be in place.
        if not exists(join(native_image_root, language_dir)):
            mx.abort('Language subdir \'' + language_flag + '\' should already exist with extract=False')
        return language_suite

    # Lay out the language's jar/library dependencies under <root>/languages/<flag>.
    language_suite_depnames = language_entry[1]
    language_deps = language_suite.dists + language_suite.libs
    language_deps = [dep for dep in language_deps if dep.name in language_suite_depnames]
    native_image_layout(language_deps, language_dir, native_image_root, debug_gr_8964=debug_gr_8964)

    # Extract the native distributions (archives) into the same subdir.
    language_suite_nativedistnames = language_entry[2]
    language_nativedists = [dist for dist in language_suite.dists if dist.name in language_suite_nativedistnames]
    native_image_extract(language_nativedists, language_dir, native_image_root)

    # Provide native-image.properties: prefer the suite's own file (symlinked),
    # otherwise fall back to the generic per-language properties.
    option_properties = join(language_suite.mxDir, 'native-image.properties')
    target_path = remove_existing_symlink(join(native_image_root, language_dir, 'native-image.properties'))
    if exists(option_properties):
        if not exists(target_path):
            mx.logv('Add symlink to ' + str(option_properties))
            symlink_or_copy(option_properties, target_path, debug_gr_8964=debug_gr_8964)
    else:
        native_image_option_properties('languages', language_flag, native_image_root)
    return language_suite
def testdownstream(suite, repoUrls, relTargetSuiteDir, mxCommands, branch=None):
    """
    Tests a downstream repo against the current working directory state of `suite`.

    :param mx.Suite suite: the suite to test against the downstream repo
    :param list repoUrls: URLs of downstream repos to clone, the first of which is the repo being tested
    :param str relTargetSuiteDir: directory of the downstream suite to test relative to the top level
             directory of the downstream repo being tested
    :param list mxCommands: argument lists for the mx commands run in downstream suite being tested
    :param str branch: name of branch to look for in downstream repo(s)
    """
    assert len(repoUrls) > 0
    repoUrls = [mx_urlrewrites.rewriteurl(url) for url in repoUrls]

    workDir = join(suite.get_output_root(), 'testdownstream')

    # A mirror of each suites in the same repo as `suite` is created via copying
    rel_mirror = os.path.relpath(suite.dir, mx.SuiteModel.siblings_dir(suite.dir))
    in_subdir = os.sep in rel_mirror
    suites_in_repo = [suite]
    if in_subdir:
        # Scan sibling directories for other suites living in the same repo,
        # reusing already-loaded suite objects when mx knows about them.
        base = os.path.dirname(suite.dir)
        for e in os.listdir(base):
            candidate = join(base, e)
            if candidate != suite.dir:
                mxDir = mx._is_suite_dir(candidate)
                if mxDir:
                    matches = [s for s in mx.suites() if s.dir == candidate]
                    if len(matches) == 0:
                        suites_in_repo.append(mx.SourceSuite(mxDir, primary=False, load=False))
                    else:
                        suites_in_repo.append(matches[0])

    for suite_in_repo in suites_in_repo:
        if suite_in_repo.vc_dir and suite_in_repo.dir != suite_in_repo.vc_dir:
            mirror = join(workDir, basename(suite_in_repo.vc_dir), suite_in_repo.name)
        else:
            mirror = join(workDir, suite_in_repo.name)
        if exists(mirror):
            shutil.rmtree(mirror)

        output_root = suite_in_repo.get_output_root()

        # copytree ignore-callback: skip the suite's output root when copying.
        # Rebound each iteration; called synchronously by copytree below, so the
        # closure over `output_root` is safe despite the loop.
        def ignore_output_root(d, names):
            mx.log('Copying ' + d)
            if d == os.path.dirname(output_root):
                mx.log('Omitting ' + output_root)
                return [os.path.basename(output_root)]
            return []

        shutil.copytree(suite_in_repo.dir, mirror, ignore=ignore_output_root)

    targetDir = None
    for repoUrl in repoUrls:
        # Deduce a target name from the target URL
        url = urlparse(repoUrl)
        targetName = url.path
        if targetName.rfind('/') != -1:
            targetName = targetName[targetName.rfind('/') + 1:]
        if targetName.endswith('.git'):
            targetName = targetName[0:-len('.git')]
        repoWorkDir = join(workDir, targetName)
        git = mx.GitConfig()
        if exists(repoWorkDir):
            git.pull(repoWorkDir)
        else:
            git.clone(repoUrl, repoWorkDir)

        # See if there's a matching (non-master) branch and use it if there is
        if not branch:
            # Fall back to the current branch of the primary suite's repo.
            branch = git.git_command(suite.dir, ['rev-parse', '--abbrev-ref', 'HEAD']).strip()
        if branch != 'master':
            git.git_command(repoWorkDir, ['checkout', branch], abortOnError=False)
        # The first repo in `repoUrls` is the one being tested.
        if not targetDir:
            targetDir = repoWorkDir

    assert not isabs(relTargetSuiteDir)
    targetSuiteDir = join(targetDir, relTargetSuiteDir)
    assert targetSuiteDir.startswith(targetDir)
    # NOTE(review): `mirror` here refers to the *last* suite mirrored in the
    # loop above — this relies on the primary suite's mirror being relevant
    # only when `suite` is the mx suite itself; confirm against callers.
    mxpy = None if suite != mx._mx_suite else join(mirror, 'mx.py')
    for command in mxCommands:
        mx.logv('[running "mx ' + ' '.join(command) + '" in ' + targetSuiteDir + ']')
        mx.run_mx(command, targetSuiteDir, mxpy=mxpy)
def testdownstream(suite, repoUrls, relTargetSuiteDir, mxCommands, branch=None):
    """
    Tests a downstream repo against the current working directory state of `suite`.

    :param mx.Suite suite: the suite to test against the downstream repo
    :param list repoUrls: URLs of downstream repos to clone, the first of which is the repo being tested
    :param str relTargetSuiteDir: directory of the downstream suite to test relative to the top level
             directory of the downstream repo being tested
    :param list mxCommands: argument lists for the mx commands run in downstream suite being tested
    :param str branch: name(s) of branch to look for in downstream repo(s)
    """
    assert len(repoUrls) > 0
    repoUrls = [mx_urlrewrites.rewriteurl(url) for url in repoUrls]

    workDir = join(suite.get_output_root(), 'testdownstream')

    # A mirror of each suites in the same repo as `suite` is created via copying
    rel_mirror = os.path.relpath(suite.dir, mx.SuiteModel.siblings_dir(suite.dir))
    in_subdir = os.sep in rel_mirror
    suites_in_repo = [suite]
    if in_subdir:
        # Scan sibling directories for other suites living in the same repo,
        # reusing already-loaded suite objects when mx knows about them.
        base = os.path.dirname(suite.dir)
        for e in os.listdir(base):
            candidate = join(base, e)
            if candidate != suite.dir:
                mxDir = mx._is_suite_dir(candidate)
                if mxDir:
                    matches = [s for s in mx.suites() if s.dir == candidate]
                    if len(matches) == 0:
                        suites_in_repo.append(mx.SourceSuite(mxDir, primary=False, load=False))
                    else:
                        suites_in_repo.append(matches[0])

    # Build a blacklist of directories to omit from the mirror copy:
    # the VC metadata dir (e.g. .git) and each suite's output root.
    if suite.vc:
        vc_metadir = mx._safe_path(mx.VC.get_vc(suite.vc_dir).metadir())
        blacklist = {suite.vc_dir: [join(suite.vc_dir, vc_metadir)]}
    else:
        blacklist = {}

    for suite_in_repo in suites_in_repo:
        output_root = mx._safe_path(suite_in_repo.get_output_root())
        blacklist.setdefault(dirname(output_root), []).append(output_root)

    def omitted_dirs(d, names):
        # copytree ignore-callback: report progress and skip blacklisted dirs.
        mx.log('Copying ' + d)
        to_omit = []
        for blacklisted_dir in blacklist.get(d, []):
            mx.log('Omitting ' + blacklisted_dir)
            to_omit.append(basename(blacklisted_dir))
        return to_omit

    if suite.vc_dir and suite.dir != suite.vc_dir:
        mirror = join(workDir, basename(suite.vc_dir))
    else:
        mirror = join(workDir, suite.name)
    if exists(mirror):
        mx.rmtree(mirror)

    mx.copytree(suite.vc_dir, mirror, ignore=omitted_dirs, symlinks=True)

    # Normalize `branch` into a candidate list exactly once, copying any
    # caller-supplied list so it is not mutated.  (Fixed: this used to run
    # inside the per-repo loop, mutating the caller's list and appending the
    # active branch once per repo, yielding duplicates for multi-repo calls.)
    if branch is None:
        branch = []
    elif isinstance(branch, str):
        branch = [branch]
    else:
        assert isinstance(branch, list)
        branch = list(branch)

    git = mx.GitConfig()
    # fall back to the branch of the main repo
    active_branch = git.active_branch(suite.dir, abortOnError=False)
    if active_branch:
        branch.append(active_branch)

    targetDir = None
    for repoUrl in repoUrls:
        # Deduce a target name from the target URL
        url = _urllib_parse.urlparse(repoUrl)
        targetName = url.path
        if targetName.rfind('/') != -1:
            targetName = targetName[targetName.rfind('/') + 1:]
        if targetName.endswith('.git'):
            targetName = targetName[0:-len('.git')]
        repoWorkDir = join(workDir, targetName)
        if exists(repoWorkDir):
            git.pull(repoWorkDir)
        else:
            git.clone(repoUrl, repoWorkDir)

        # Try each candidate branch in order; warn if none could be checked out.
        updated = False
        for branch_name in branch:
            if git.update_to_branch(repoWorkDir, branch_name, abortOnError=False):
                updated = True
                break
        if not updated:
            mx.warn('Could not update {} to any of the following branches: {}'.format(
                repoWorkDir, ', '.join(branch)))

        # The first repo in `repoUrls` is the one being tested.
        if not targetDir:
            targetDir = repoWorkDir

    assert not isabs(relTargetSuiteDir)
    targetSuiteDir = join(targetDir, relTargetSuiteDir)
    assert targetSuiteDir.startswith(targetDir)
    mxpy = None if suite != mx._mx_suite else join(mirror, 'mx.py')
    for command in mxCommands:
        mx.logv('[running "mx ' + ' '.join(command) + '" in ' + targetSuiteDir + ']')
        mx.run_mx(command, targetSuiteDir, mxpy=mxpy)
def fetch_jdk(args):
    """
    Installs a JDK based on the coordinates in `args`. See ``mx fetch-jdk --help`` for more info.
    Note that if a JDK already exists at the installation location denoted by `args`, no action is taken.

    :return str: the JAVA_HOME for the JDK at the installation location denoted by `args`
    """
    settings = _parse_args(args)

    jdk_binary = settings["jdk-binary"]
    jdks_dir = settings["jdks-dir"]
    artifact = jdk_binary._folder_name
    final_path = jdk_binary.get_final_path(jdks_dir)
    url = mx_urlrewrites.rewriteurl(jdk_binary._url)
    sha_url = url + ".sha1"
    archive_name = jdk_binary._archive
    archive_target_location = join(jdks_dir, archive_name)

    if not is_quiet():
        if not mx.ask_yes_no("Install {} to {}".format(artifact, final_path), default='y'):
            mx.abort("JDK installation canceled")
    if exists(final_path):
        if settings["keep-archive"]:
            mx.warn("The --keep-archive option is ignored when the JDK is already installed.")
        mx.log("Requested JDK is already installed at {}".format(final_path))
    else:
        # Try to extract on the same file system as the target to be able to atomically move the result.
        with mx.TempDir(parent_dir=jdks_dir) as temp_dir:
            mx.log("Fetching {} archive from {}...".format(artifact, url))
            archive_location = join(temp_dir, archive_name)
            mx._opts.no_download_progress = is_quiet()
            # Fetch the expected checksum first so the download can be verified.
            try:
                sha1_hash = mx._hashFromUrl(sha_url).decode('utf-8')
            except Exception as e:  #pylint: disable=broad-except
                mx.abort('Error retrieving {}: {}'.format(sha_url, e))
            mx.download_file_with_sha1(artifact, archive_location, [url], sha1_hash, archive_location + '.sha1',
                                       resolve=True, mustExist=True, sources=False)
            untar = mx.TarExtractor(archive_location)

            mx.log("Installing {} to {}...".format(artifact, final_path))

            extracted_path = join(temp_dir, 'extracted')
            # NOTE(review): bare `except:` also catches KeyboardInterrupt/SystemExit;
            # intentional here only insofar as it cleans up before aborting.
            try:
                untar.extract(extracted_path)
            except:
                mx.rmtree(temp_dir, ignore_errors=True)
                mx.abort("Error parsing archive. Please try again")

            jdk_root_folder = _get_extracted_jdk_archive_root_folder(extracted_path)
            if settings["keep-archive"]:
                # Preserve the downloaded archive (and its checksum) next to the JDKs.
                atomic_file_move_with_fallback(archive_location, archive_target_location)
                atomic_file_move_with_fallback(archive_location + '.sha1', archive_target_location + ".sha1")
                mx.log("Archive is located at {}".format(archive_target_location))

            atomic_file_move_with_fallback(join(extracted_path, jdk_root_folder), final_path)

    curr_path = final_path

    # macOS bundles wrap the JDK in Contents/Home; either strip that wrapper
    # in place or point final_path inside it.
    if exists(join(final_path, 'Contents', 'Home')):
        if settings["strip-contents-home"]:
            with mx.TempDir() as tmp_path:
                tmp_jdk = join(tmp_path, 'jdk')
                shutil.move(final_path, tmp_jdk)
                shutil.move(join(tmp_jdk, 'Contents', 'Home'), final_path)
        else:
            final_path = join(final_path, 'Contents', 'Home')

    alias = settings.get('alias')
    if alias:
        # Maintain a (preferably symlinked) alias pointing at the installed JDK.
        alias_full_path = join(jdks_dir, alias)
        if not exists(alias_full_path) or os.path.realpath(alias_full_path) != os.path.realpath(abspath(curr_path)):
            if os.path.islink(alias_full_path):
                os.unlink(alias_full_path)
            elif exists(alias_full_path):
                mx.abort(alias_full_path + ' exists and it is not an existing symlink so it can not be used for a new symlink. Please remove it manually.')

            if mx.can_symlink():
                if isabs(alias):
                    os.symlink(curr_path, alias_full_path)
                else:
                    # Use a relative symlink target when the alias is relative.
                    reldir = os.path.relpath(dirname(curr_path), dirname(alias_full_path))
                    if reldir == '.':
                        alias_target = basename(curr_path)
                    else:
                        alias_target = join(reldir, basename(curr_path))
                    os.symlink(alias_target, alias_full_path)
            else:
                mx.copytree(curr_path, alias_full_path)
            final_path = alias_full_path

    mx.log("Run the following to set JAVA_HOME in your shell:")
    shell = os.environ.get("SHELL")
    if shell is None:
        shell = ''
    if not settings["strip-contents-home"] and exists(join(final_path, 'Contents', 'Home')):
        java_home = join(final_path, 'Contents', 'Home')
    else:
        java_home = final_path
    mx.log(get_setvar_format(shell) % ("JAVA_HOME", abspath(java_home)))

    return final_path
def graalpython_gate_runner(args, tasks):
    """Gate runner for GraalPython: registers and runs the CI gate tasks.

    Each ``Task`` is entered unconditionally; its body only executes when the
    task is selected for the current gate run (``if task:``).
    """
    with Task('GraalPython JUnit', tasks, tags=[GraalPythonTags.junit]) as task:
        if task:
            punittest(['--verbose'])
    with Task('GraalPython Python tests', tasks, tags=[GraalPythonTags.unittest]) as task:
        if task:
            gate_unittests()
    with Task('GraalPython C extension tests', tasks, tags=[GraalPythonTags.cpyext]) as task:
        if task:
            # we deliberately added this to test the combination of Sulong and 'mx_unittest'
            unittest(['--regex', re.escape('com.oracle.graal.python.test.module.MemoryviewTest'), "-Dgraal.TraceTruffleCompilation=true"])
            gate_unittests(subdir="cpyext/")
    with Task('GraalPython C extension managed tests', tasks, tags=[GraalPythonTags.cpyext_managed]) as task:
        if task:
            mx.run_mx(["--dynamicimports", "sulong-managed", "python-gate-unittests", "--llvm.configuration=managed", "--subdir=cpyext", "--"])
    with Task('GraalPython C extension sandboxed tests', tasks, tags=[GraalPythonTags.cpyext_sandboxed]) as task:
        if task:
            mx.run_mx(["--dynamicimports", "sulong-managed", "python-gate-unittests", "--llvm.configuration=sandboxed", "--subdir=cpyext", "--"])
    with Task('GraalPython Python tests on SVM', tasks, tags=[GraalPythonTags.svmunit]) as task:
        if task:
            # Reuse a previously-built SVM image when present; otherwise build one.
            svm_image_name = "./graalpython-svm"
            if not os.path.exists(svm_image_name):
                svm_image_name = python_svm(["-h"])
            llvm_home = mx_subst.path_substitutions.substitute('--native.Dllvm.home=<path:SULONG_LIBS>')
            args = ["--python.CoreHome=%s" % os.path.join(SUITE.dir, "graalpython", "lib-graalpython"),
                    "--python.StdLibHome=%s" % os.path.join(SUITE.dir, "graalpython", "lib-python/3"),
                    llvm_home]
            run_python_unittests(svm_image_name, args)
    with Task('GraalPython apptests', tasks, tags=[GraalPythonTags.apptests]) as task:
        if task:
            # Pulls a pinned revision of the external apptests suite.
            apprepo = os.environ["GRAALPYTHON_APPTESTS_REPO_URL"]
            _apptest_suite = SUITE.import_suite(
                "graalpython-apptests",
                version="1fc0e86a54cbe090d36f262c062d8f4eee8f2e6d",
                urlinfos=[mx.SuiteImportURLInfo(mx_urlrewrites.rewriteurl(apprepo), "git", mx.vc_system("git"))])
            mx.log(" ".join(["Running", "mx"] + ["-p", _apptest_suite.dir, "graalpython-apptests"]))
            mx.run_mx(["-p", _apptest_suite.dir, "graalpython-apptests"])
    with Task('GraalPython license header update', tasks, tags=[GraalPythonTags.license]) as task:
        if task:
            python_checkcopyrights([])
    with Task('GraalPython GraalVM shared-library build', tasks, tags=[GraalPythonTags.downstream, GraalPythonTags.graalvm]) as task:
        if task:
            run_shared_lib_test()
    with Task('GraalPython GraalVM build', tasks, tags=[GraalPythonTags.downstream, GraalPythonTags.graalvm]) as task:
        if task:
            # Smoke-test the built SVM image against a known benchmark output,
            # then run the full unittest suite on it.
            svm_image = python_svm(["--version"])
            benchmark = os.path.join(PATH_MESO, "image-magix.py")
            out = mx.OutputCapture()
            mx.run([svm_image, benchmark], nonZeroIsFatal=True, out=mx.TeeOutputCapture(out))
            success = "\n".join([
                "[0, 0, 0, 0, 0, 0, 10, 10, 10, 0, 0, 10, 3, 10, 0, 0, 10, 10, 10, 0, 0, 0, 0, 0, 0]",
            ])
            if success not in out.data:
                mx.abort('Output from generated SVM image "' + svm_image + '" did not match success pattern:\n' + success)
            llvm_home = mx_subst.path_substitutions.substitute('--native.Dllvm.home=<path:SULONG_LIBS>')
            args = ["--python.CoreHome=%s" % os.path.join(SUITE.dir, "graalpython", "lib-graalpython"),
                    "--python.StdLibHome=%s" % os.path.join(SUITE.dir, "graalpython", "lib-python/3"),
                    llvm_home]
            run_python_unittests(svm_image, args)
def graalpython_gate_runner(args, tasks):
    """Gate runner for GraalPython: registers and runs the CI gate tasks.

    Each ``Task`` is entered unconditionally; its body only executes when the
    task is selected for the current gate run (``if task:``).
    """
    with Task('GraalPython JUnit', tasks, tags=[GraalPythonTags.junit]) as task:
        if task:
            punittest(['--verbose'])
    with Task('GraalPython Python tests', tasks, tags=[GraalPythonTags.unittest]) as task:
        if task:
            gate_unittests()
    with Task('GraalPython C extension tests', tasks, tags=[GraalPythonTags.cpyext]) as task:
        if task:
            gate_unittests(subdir="cpyext/")
    with Task('GraalPython C extension managed tests', tasks, tags=[GraalPythonTags.cpyext_managed]) as task:
        if task:
            mx.run_mx(["--dynamicimports", "sulong-managed", "python-gate-unittests", "--llvm.configuration=managed", "--subdir=cpyext", "--"])
    with Task('GraalPython C extension sandboxed tests', tasks, tags=[GraalPythonTags.cpyext_sandboxed]) as task:
        if task:
            mx.run_mx(["--dynamicimports", "sulong-managed", "python-gate-unittests", "--llvm.configuration=sandboxed", "--subdir=cpyext", "--"])
    with Task('GraalPython Python tests on SVM', tasks, tags=[GraalPythonTags.svmunit]) as task:
        if task:
            # Build the SVM image if missing; otherwise run the unittests on it.
            svm_image_name = "./graalpython-svm"
            if not os.path.exists(svm_image_name):
                python_svm(["-h"])
            else:
                _python_svm_unittest(svm_image_name)
    with Task('GraalPython apptests', tasks, tags=[GraalPythonTags.apptests]) as task:
        if task:
            # Pulls a pinned revision of the external apptests suite.
            apprepo = os.environ["GRAALPYTHON_APPTESTS_REPO_URL"]
            _apptest_suite = _suite.import_suite(
                "graalpython-apptests",
                version="f40fcf3af008d30a67e0dbc325a0d90f1e68f0c0",
                urlinfos=[mx.SuiteImportURLInfo(mx_urlrewrites.rewriteurl(apprepo), "git", mx.vc_system("git"))]
            )
            mx.run_mx(["-p", _apptest_suite.dir, "graalpython-apptests"])
    with Task('GraalPython license header update', tasks, tags=[GraalPythonTags.license]) as task:
        if task:
            python_checkcopyrights([])
    with Task('GraalPython GraalVM shared-library build', tasks, tags=[GraalPythonTags.downstream, GraalPythonTags.graalvm]) as task:
        if task:
            run_shared_lib_test()
    with Task('GraalPython GraalVM build', tasks, tags=[GraalPythonTags.downstream, GraalPythonTags.graalvm]) as task:
        if task:
            # Smoke-test the built SVM image against a known benchmark output.
            svm_image = python_svm(["--version"])
            benchmark = os.path.join(PATH_MESO, "image-magix.py")
            out = mx.OutputCapture()
            mx.run(
                [svm_image, benchmark],
                nonZeroIsFatal=True,
                out=mx.TeeOutputCapture(out)
            )
            success = "\n".join([
                "[0, 0, 0, 0, 0, 0, 10, 10, 10, 0, 0, 10, 3, 10, 0, 0, 10, 10, 10, 0, 0, 0, 0, 0, 0]",
            ])
            if success not in out.data:
                mx.abort('Output from generated SVM image "' + svm_image + '" did not match success pattern:\n' + success)
def ensure_trufflelanguage(language, version):
    '''
    Ensures that we have a valid suite for "language", by downloading a binary if necessary.
    Takes account "version" if not "None".
    Sets "session_language[language]" to the mx suite and returns it.
    '''
    # Probe which truffle-language suites are already loaded in this session.
    session_language = {}
    for _language in ['truffleruby', 'graal-js', 'sulong', 'graalpython', 'fastr']:
        session_language[_language] = mx.suite(_language, fatalIfMissing=False)
    mx.logv('Session languages: {}'.format(session_language))

    if session_language[language]:
        mx.log('Reusing ' + language + '.version=' + str(session_language[language].version()))
        return session_language[language]

    # Prefer a local checkout (sibling dir or main/mx.<language>) over a download.
    if os.path.exists(os.path.join("..", "..", language)) or os.path.exists(os.path.join("..", "..", "main", "mx." + language)):
        language_suite = suite.import_suite(language)
    else:
        urlinfos = [mx.SuiteImportURLInfo(mx_urlrewrites.rewriteurl('https://curio.ssw.jku.at/nexus/content/repositories/snapshots'),
                                          'binary',
                                          mx.vc_system('binary'))]
        if not version:
            # If no specific version requested use binary import of last recently deployed master version
            version = 'git-bref:binary'
            urlinfos.append(mx.SuiteImportURLInfo(mx_urlrewrites.rewriteurl('https://github.com/graalvm/{0}.git'.format(language)),
                                                  'source',
                                                  mx.vc_system('git')))
        try:
            language_suite = suite.import_suite(language, version=version, urlinfos=urlinfos, kind=None)
        except (urllib2.URLError, SystemExit):
            # No network / maven access: fall back to whatever is available locally.
            language_suite = suite.import_suite(language)
            if language_suite:
                # NOTE(review): at this point session_language[language] is falsy
                # (a truthy value returned earlier), so this abort branch appears
                # unreachable — confirm whether it guards a historical code path.
                if version and session_language[language] and version != session_language[language].version():
                    mx.abort('Cannot switch to ' + language + '.version=' + str(version) + ' without maven access.')
                else:
                    mx.log('No maven access. Using already downloaded ' + language + ' binary suite.')
            else:
                mx.abort('No maven access and no local copy of ' + language + ' binary suite available.')

    if not language_suite:
        mx.abort('Binary suite not found and no local copy of ' + language + ' available.')

    session_language[language] = language_suite
    return session_language[language]
def hsdis(args, copyToDir=None):
    """download the hsdis library

    This is needed to support HotSpot's assembly dumping features.
    By default it downloads the Intel syntax version, use the 'att'
    argument to install AT&T syntax.

    :param args: command-line args; the presence of 'att' selects AT&T syntax on amd64
    :param copyToDir: destination directory; when None the library is installed
           into the current JDK (and an existing copy is never overwritten)
    """
    import filecmp

    # Syntax flavor only applies to amd64; HSDIS_SYNTAX env var overrides.
    flavor = None
    if mx.get_arch() == "amd64":
        flavor = mx.get_env('HSDIS_SYNTAX')
        if flavor is None:
            flavor = 'intel'
        if 'att' in args:
            flavor = 'att'

    libpattern = mx.add_lib_suffix('hsdis-' + mx.get_arch() + '-' + mx.get_os() + '-%s')

    # Keys use the platform path separator produced by join() below
    # (backslash variants cover Windows).
    sha1s = {
        r'att\hsdis-amd64-windows-%s.dll': 'bcbd535a9568b5075ab41e96205e26a2bac64f72',
        r'att/hsdis-amd64-linux-%s.so': '36a0b8e30fc370727920cc089f104bfb9cd508a0',
        r'att/hsdis-amd64-darwin-%s.dylib': 'c1865e9a58ca773fdc1c5eea0a4dfda213420ffb',
        r'intel\hsdis-amd64-windows-%s.dll': '6a388372cdd5fe905c1a26ced614334e405d1f30',
        r'intel/hsdis-amd64-linux-%s.so': '0d031013db9a80d6c88330c42c983fbfa7053193',
        r'intel/hsdis-amd64-darwin-%s.dylib': '67f6d23cbebd8998450a88b5bef362171f66f11a',
        r'hsdis-aarch64-linux-%s.so': 'fcc9b70ac91c00db8a50b0d4345490a68e3743e1',
    }

    if flavor:
        flavoredLib = join(flavor, libpattern)
    else:
        flavoredLib = libpattern
    if flavoredLib not in sha1s:
        mx.warn("hsdis with flavor '{}' not supported on this platform or architecture".format(flavor))
        return

    sha1 = sha1s[flavoredLib]
    # The library file name embeds its own sha1.
    lib = flavoredLib % sha1
    path = join(_suite.get_output_root(), lib)
    if not exists(path):
        sha1path = path + '.sha1'
        mx.download_file_with_sha1(
            'hsdis', path,
            [rewriteurl('https://lafo.ssw.uni-linz.ac.at/pub/graal-external-deps/hsdis/' + lib.replace(os.sep, '/'))],
            sha1, sha1path, True, True, sources=False)

    overwrite = True
    if copyToDir is None:
        # Try install hsdis into JAVA_HOME
        overwrite = False
        jdk = mx.get_jdk()
        base = jdk.home
        if exists(join(base, 'jre')):
            base = join(base, 'jre')
        if mx.get_os() == 'darwin':
            copyToDir = join(base, 'lib')
        elif mx.get_os() == 'windows':
            copyToDir = join(base, 'bin')
        else:
            # JDK 11+ dropped the per-arch lib subdirectory.
            if jdk.javaCompliance >= '11':
                copyToDir = join(base, 'lib')
            else:
                copyToDir = join(base, 'lib', mx.get_arch())

    if exists(copyToDir):
        dest = join(copyToDir, mx.add_lib_suffix('hsdis-' + mx.get_arch()))
        if exists(dest) and not overwrite:
            # Only issue warning if existing lib is different
            if not filecmp.cmp(path, dest):
                mx.warn('Not overwriting existing {} with {}'.format(dest, path))
        else:
            try:
                shutil.copy(path, dest)
                mx.log('Copied {} to {}'.format(path, dest))
            except IOError as e:
                mx.warn('Could not copy {} to {}: {}'.format(path, dest, str(e)))
def graalpython_gate_runner(args, tasks):
    """Gate runner for GraalPython: registers and runs the CI gate tasks.

    Each ``Task`` is entered unconditionally; its body only executes when the
    task is selected for the current gate run (``if task:``).
    """
    _graalpytest_driver = "graalpython/com.oracle.graal.python.test/src/graalpytest.py"
    _test_project = "graalpython/com.oracle.graal.python.test/"
    with Task('GraalPython JUnit', tasks, tags=[GraalPythonTags.junit]) as task:
        if task:
            punittest(['--verbose'])
    with Task('GraalPython Python tests', tasks, tags=[GraalPythonTags.unittest]) as task:
        if task:
            test_args = [_graalpytest_driver, "-v", _test_project + "src/tests/"]
            mx.command_function("python")(["--python.CatchAllExceptions=true"] + test_args)
            if platform.system() != 'Darwin':
                # TODO: re-enable when python3 is available on darwin
                mx.log("Running tests with CPython")
                mx.run(["python3"] + test_args, nonZeroIsFatal=True)
    with Task('GraalPython C extension tests', tasks, tags=[GraalPythonTags.cpyext]) as task:
        if task:
            test_args = [_graalpytest_driver, "-v", _test_project + "src/tests/cpyext/"]
            mx.command_function("python")(test_args)
            if platform.system() != 'Darwin':
                # TODO: re-enable when python3 is available on darwin
                mx.log("Running tests with CPython")
                mx.run(["python3"] + test_args, nonZeroIsFatal=True)
    with Task('GraalPython Python tests on SVM', tasks, tags=[GraalPythonTags.svmunit]) as task:
        if task:
            # Build the SVM image if it is missing, then (if present) run the
            # SVM-compatible subset of the Python unittests on it.
            if not os.path.exists("./graalpython-svm"):
                python_svm(["-h"])
            if os.path.exists("./graalpython-svm"):
                langhome = mx_subst.path_substitutions.substitute('--native.Dllvm.home=<path:SULONG_LIBS>')

                # tests root directory
                tests_folder = "graalpython/com.oracle.graal.python.test/src/tests/"

                # list of excluded tests
                excluded = ["test_interop.py"]

                def is_included(path):
                    # Accept only test_*.py files that are not explicitly excluded.
                    if path.endswith(".py"):
                        basename = path.rpartition("/")[2]
                        return basename.startswith("test_") and basename not in excluded
                    return False

                # list all 1st-level tests and exclude the SVM-incompatible ones
                testfiles = []
                paths = [tests_folder]
                while paths:
                    path = paths.pop()
                    if is_included(path):
                        testfiles.append(path)
                    else:
                        try:
                            # Non-matching entries are treated as directories to recurse into.
                            paths += [(path + f if path.endswith("/") else "%s/%s" % (path, f)) for f in os.listdir(path)]
                        except OSError:
                            pass

                test_args = ["graalpython/com.oracle.graal.python.test/src/graalpytest.py", "-v"] + testfiles
                mx.run(["./graalpython-svm",
                        "--python.CoreHome=graalpython/lib-graalpython",
                        "--python.StdLibHome=graalpython/lib-python/3",
                        langhome] + test_args,
                       nonZeroIsFatal=True)
    with Task('GraalPython downstream R tests', tasks, tags=[GraalPythonTags.downstream, GraalPythonTags.R]) as task:
        # Paths computed unconditionally; only the testdownstream calls are gated.
        script_r2p = os.path.join(_suite.dir, "graalpython", "benchmarks", "src", "benchmarks", "interop", "r_python_image_demo.r")
        script_p2r = os.path.join(_suite.dir, "graalpython", "benchmarks", "src", "benchmarks", "interop", "python_r_image_demo.py")
        pythonjars = os.pathsep.join([
            os.path.join(_suite.dir, "mxbuild", "dists", "graalpython.jar"),
            os.path.join(_suite.dir, "mxbuild", "dists", "graalpython-env.jar")
        ])
        if task:
            rrepo = os.environ["FASTR_REPO_URL"]
            # R -> Python interop demo
            testdownstream(
                _suite,
                [rrepo, mx.suite("truffle").vc._remote_url(mx.suite("truffle").dir, "origin")],
                ".",
                [["--dynamicimports", "graalpython", "--version-conflict-resolution", "latest_all", "build", "--force-deprecation-as-warning"],
                 ["--cp-sfx", pythonjars, "r", "--polyglot", "--file=%s" % script_r2p]])
            # Python -> R interop demo
            testdownstream(
                _suite,
                [rrepo, mx.suite("truffle").vc._remote_url(mx.suite("truffle").dir, "origin")],
                ".",
                [["--dynamicimports", "graalpython", "--version-conflict-resolution", "latest_all", "build", "--force-deprecation-as-warning"],
                 ["-v", "--cp-sfx", pythonjars, "r", "--jvm", "--polyglot", "-e", "eval.polyglot('python', path='%s')" % str(script_p2r)]])
    with Task('GraalPython apptests', tasks, tags=[GraalPythonTags.apptests]) as task:
        if task:
            apprepo = os.environ["GRAALPYTHON_APPTESTS_REPO_URL"]
            _apptest_suite = _suite.import_suite(
                "graalpython-apptests",
                urlinfos=[mx.SuiteImportURLInfo(mx_urlrewrites.rewriteurl(apprepo), "git", mx.vc_system("git"))])
            mx.run_mx(["-p", _apptest_suite.dir, "graalpython-apptests"])
    with Task('GraalPython license header update', tasks, tags=[GraalPythonTags.license]) as task:
        if task:
            python_checkcopyrights([])
    with Task('GraalPython GraalVM shared-library build', tasks, tags=[GraalPythonTags.downstream, GraalPythonTags.graalvm]) as task:
        if task:
            run_shared_lib_test()
    with Task('GraalPython GraalVM build', tasks, tags=[GraalPythonTags.downstream, GraalPythonTags.graalvm]) as task:
        if task:
            # Smoke-test the built SVM image against known benchmark output lines.
            svm_image = python_svm(["--version"])
            benchmark = os.path.join("graalpython", "benchmarks", "src", "benchmarks", "image_magix.py")
            out = mx.OutputCapture()
            mx.run([svm_image, benchmark], nonZeroIsFatal=True, out=mx.TeeOutputCapture(out))
            success = "\n".join([
                "[0, 0, 0, 0, 0, 0, 20, 20, 20, 0, 0, 20, 20, 20, 0, 0, 20, 20, 20, 0, 0, 0, 0, 0, 0]",
                "[11, 12, 13, 14, 15, 21, 22, 23, 24, 25, 31, 32, 33, 34, 35, 41, 42, 43, 44, 45, 51, 52, 53, 54, 55]",
                "[11, 12, 13, 14, 15, 21, 22, 23, 24, 25, 31, 32, 36, 36, 35, 41, 41, 40, 40, 45, 51, 52, 53, 54, 55]"
            ])
            if success not in out.data:
                mx.abort('Output from generated SVM image "' + svm_image + '" did not match success pattern:\n' + success)
    # NOTE(review): dict.iteritems() is Python-2-only; this line breaks under
    # Python 3 (use .items()) — confirm which interpreter runs this gate.
    for name, iterations in sorted(python_test_benchmarks.iteritems()):
        with Task('PythonBenchmarksTest:' + name, tasks, tags=[GraalPythonTags.benchmarks]) as task:
            if task:
                _gate_python_benchmarks_tests("graalpython/benchmarks/src/benchmarks/" + name + ".py", iterations)
def coverage_upload(args):
    """Upload JaCoCo coverage results, plus the Java sources and binaries the
    report refers to, to a remote host over ssh, and regenerate the remote
    ``index.json`` / ``index.html`` / ``navigation.html`` browsing pages.

    The upload destination is given rsync-style as ``user@host:/directory``
    (``--upload-url`` or the ``COVERAGE_UPLOAD_URL`` environment variable).
    Remaining arguments are forwarded to the jacocoreport command.
    """
    parser = ArgumentParser(prog='mx coverage-upload')
    parser.add_argument('--upload-url', required=False, default=mx.get_env('COVERAGE_UPLOAD_URL'), help='Format is like rsync: user@host:/directory')
    parser.add_argument('--build-name', required=False, default=mx.get_env('BUILD_NAME'))
    parser.add_argument('--build-url', required=False, default=mx.get_env('BUILD_URL'))
    parser.add_argument('--build-number', required=False, default=mx.get_env('BUILD_NUMBER'))
    args, other_args = parser.parse_known_args(args)
    if not args.upload_url:
        # No destination configured: print usage and bail out silently.
        parser.print_help()
        return
    remote_host, remote_basedir = args.upload_url.split(':')
    if not remote_host:
        mx.abort('Cannot determine remote host from {}'.format(args.upload_url))
    primary = mx.primary_suite()
    info = primary.vc.parent_info(primary.dir)
    rev = primary.vc.parent(primary.dir)
    if len(remote_basedir) > 0 and not remote_basedir.endswith('/'):
        remote_basedir += '/'
    # Remote directory name encodes suite name, commit author date and
    # abbreviated revision, optionally followed by build name/number.
    remote_dir = '{}_{}_{}'.format(primary.name, datetime.datetime.fromtimestamp(info['author-ts']).strftime('%Y-%m-%d_%H_%M'), rev[:7])
    if args.build_name:
        remote_dir += '_' + args.build_name
    if args.build_number:
        remote_dir += '_' + args.build_number
    upload_dir = remote_basedir + remote_dir
    # Generate the report locally first; includes/excludes are recorded in the
    # description.json uploaded below.
    includes, excludes = _jacocoreport(['--omit-excluded'] + other_args)

    # Upload jar+sources
    coverage_sources = 'java_sources.tar.gz'
    coverage_binaries = 'java_binaries.tar.gz'

    # Archive the class files and source dirs of every non-test Java project
    # so the remote report can link line coverage back to sources.
    with mx.Archiver(os.path.realpath(coverage_sources), kind='tgz') as sources, mx.Archiver(os.path.realpath(coverage_binaries), kind='tgz') as binaries:
        def _visit_deps(dep, edge):
            if dep.isJavaProject() and not dep.is_test_project():
                binaries.zf.add(dep.output_dir(), dep.name)
                for d in dep.source_dirs():
                    sources.zf.add(d, dep.name)
                if os.path.exists(dep.source_gen_dir()):
                    sources.zf.add(dep.source_gen_dir(), dep.name)
        mx.walk_deps(mx.projects(), visit=_visit_deps)

    files = [get_jacoco_dest_file(), 'coverage', coverage_sources, coverage_binaries]
    print("Syncing {} to {}:{}".format(" ".join(files), remote_host, upload_dir))
    # Stream a local tarball through ssh and unpack it remotely in one pipe;
    # the \'"..."\' quoting survives the extra shell level introduced by ssh.
    mx.run([
        'bash', '-c', r'tar -czf - {files} | ssh {remote} bash -c \'"mkdir -p {remotedir} && cd {remotedir} && cat | tar -x{verbose}z && chmod -R 755 ."\''
            .format(files=" ".join(files), remote=remote_host, remotedir=upload_dir, verbose='v' if mx._opts.verbose else '')
    ])

    def upload_string(content, path):
        # Write `content` to `path` on the remote host via ssh stdin.
        mx.run(['ssh', remote_host, 'bash', '-c', 'cat > "' + path + '"'], stdin=content)

    # Machine-readable metadata about this upload, consumed by the
    # navigation page below (and aggregated into index.json).
    upload_string(json.dumps({
        'timestamp': time.time(),
        'suite': primary.name,
        'revision': rev,
        'directory': remote_dir,
        'build_name': args.build_name,
        'build_url': args.build_url,
        'jdk_version': str(mx.get_jdk().version),
        'build_number': args.build_number,
        'primary_info': info,
        'excludes': [str(e) for e in excludes],
        'includes': [str(i) for i in includes]}), upload_dir + '/description.json')
    # Rebuild index.json remotely by concatenating every description.json.
    # NOTE(review): the embedded shell uses 'fi done' without a separator —
    # presumably this works through the ssh/bash quoting layers; verify before
    # touching the string.
    mx.run(['ssh', remote_host, 'bash', '-c', r'"(echo \[; for i in {remote_basedir}/*/description.json; do if \[ -s \$i \];then cat \$i; echo ,; fi done; echo null\]) > {remote_basedir}/index.json"'.format(remote_basedir=remote_basedir)])
    # Static frameset: navigation bar on top, selected coverage report below.
    upload_string("""<html>
<script language="javascript">
  function urlChange(url) {
    if (url.pathname !== "blank") {
      window.history.replaceState(null, null, url.pathname.replace("/coverage_upload/", "/coverage_upload/#"))
    }
  }
</script>
<frameset rows="40,*">
  <frame id="navigation" src="navigation.html"/>
  <frame id="content" src="" onload="urlChange(this.contentWindow.location);" />
</frameset>
</html>""", remote_basedir + '/index.html')
    js_library_url = rewriteurl("https://ajax.googleapis.com/ajax/libs/angularjs/1.7.7/angular.js")
    # AngularJS-driven navigation page: lists all uploaded builds (deduplicated
    # per suite by revision, see #GR-17399) and lets the user step through them.
    upload_string(r"""<html>
<head>
<script src="%js_library_url"></script>
<script language="javascript">
var App = angular.module('myApp', [])
    .controller('IndexCtrl', function IndexCtrl($scope, $http) {
        var hash = parent.window.location.hash;
        if(hash) {
            hash = hash.substring(1, hash.length); // remove leading hash
        }
        $http.get('index.json').then(function(response, status) {
            var data = response.data.filter(x => x != null);
            /* #GR-17399
            Filter build that are unique per suite with revision as key and merge builds.
            */
            data = data
                .filter(x => !x.hasOwnProperty('merge'))
                .filter( // filter builds that are unique per suite with revision as key
                    x => !data
                        .filter(z => x != z && x.suite == z.suite) // exclude self build and build for other suites.
                        .map(z => z.revision) // map from array of build to array of revision
                        .includes(x.revision) // check if revision of x is index data.
                ).concat(data.filter(x => x.hasOwnProperty('merge'))); // concat unique build with merged build.
            data.sort((l,r) => r.timestamp - l.timestamp);
            if(data.length > 0) {
                var startdir;
                if(hash) {
                    startdir = data.find(build => hash.includes(build.directory));
                    startdir.hash = hash;
                }
                if(!startdir) {
                    startdir = data[0];
                }
                $scope.directory = startdir;
            }
            $scope.data = data;
        });
        $scope.$watch('directory', (dir, olddir) => {
            if(dir) {
                var content = parent.document.getElementById("content");
                var contentDocument = content.contentDocument || content.contentWindow.document;
                var newpath;
                if(olddir && olddir.suite === dir.suite) {
                    newpath = contentDocument.location.href.replace(olddir.directory, dir.directory);
                } else {
                    newpath = dir.hasOwnProperty('hash') ? hash : dir.directory + "/coverage/";
                }
                contentDocument.location.href = newpath;
                parent.window.history.replaceState(undefined, undefined, "#" + newpath.replace(/^.+coverage_upload\//, ""));
            }
        });
        $scope.step = (i) => $scope.directory = $scope.data[$scope.data.indexOf($scope.directory)+i];
    });
function copy(url) {
    var content = parent.document.getElementById("content");
    var contentDocument = content.contentDocument || content.contentWindow.document;
    var copyText = document.getElementById("copy");
    copyText.value = contentDocument.location.href.replace("coverage_upload/", "coverage_upload/#");
    copyText.select();
    document.execCommand("copy");
}
</script>
</head>
<body ng-app="myApp" ng-controller="IndexCtrl">
    <button ng-click="step(1)" ng-disabled="data.indexOf(directory) >= data.length-1"><<</button>
    <button ng-click="step(-1)" ng-disabled="data.indexOf(directory) <= 0">>></button>
    <select ng-model="directory" ng-options="(i.primary_info['author-ts']*1000|date:'yy-MM-dd hh:mm') + ' ' + i.build_name + ' ' + i.revision.substr(0,8) group by i.suite for i in data"></select>
    <a href="{{directory.build_url}}" ng-if="directory.build_url" target="_blank">Build</a> Commit: {{directory.revision.substr(0,5)}}: {{directory.primary_info.description}}
    <input type="text" style="opacity: 0;width: 20;" id="copy" />
    <button style="float: right;" onclick="copy(window.location);">Share url</button>
</body>
</html>""".replace("%js_library_url", js_library_url), remote_basedir + '/navigation.html')
def truffle_language_ensure(language_flag, version=None, native_image_root=None):
    """
    Ensures that we have a valid suite for the given language_flag, by downloading a binary if necessary
    and providing the suite distribution artifacts in the native-image directory hierachy (via symlinks).
    :param language_flag: native-image language_flag whose truffle-language we want to use
    :param version: if not specified and no TRUFFLE_<LANG>_VERSION set latest binary deployed master revision gets downloaded
    :param native_image_root: the native_image_root directory where the the artifacts get installed to
    :return: language suite for the given language_flag
    """
    if not native_image_root:
        native_image_root = suite_native_image_root()

    # Allow overriding the version via TRUFFLE_<LANG>_VERSION.
    # Fix: dict.has_key() was removed in Python 3; the `in` operator is the
    # equivalent membership test and works on Python 2 as well.
    version_env_var = 'TRUFFLE_' + language_flag.upper() + '_VERSION'
    if not version and version_env_var in os.environ:
        version = os.environ[version_env_var]

    if language_flag not in flag_suitename_map:
        mx.abort('No truffle-language uses language_flag \'' + language_flag + '\'')

    # flag_suitename_map entry layout: (suite_name, dep_names, nativedist_names[, repo_name])
    language_entry = flag_suitename_map[language_flag]

    language_suite_name = language_entry[0]
    language_repo_name = language_entry[3] if len(language_entry) > 3 else None

    urlinfos = [
        mx.SuiteImportURLInfo(mx_urlrewrites.rewriteurl('https://curio.ssw.jku.at/nexus/content/repositories/snapshots'),
                              'binary',
                              mx.vc_system('binary'))
    ]

    if not version:
        # If no specific version requested use binary import of last recently deployed master version
        version = 'git-bref:binary'
        repo_suite_name = language_repo_name if language_repo_name else language_suite_name
        urlinfos.append(
            mx.SuiteImportURLInfo(mx_urlrewrites.rewriteurl('https://github.com/graalvm/{0}.git'.format(repo_suite_name)),
                                  'source',
                                  mx.vc_system('git'))
        )

    language_suite = suite.import_suite(
        language_suite_name,
        version=version,
        urlinfos=urlinfos,
        kind=None,
        in_subdir=bool(language_repo_name)
    )

    if not language_suite:
        mx.abort('Binary suite not found and no local copy of ' + language_suite_name + ' available.')

    language_dir = join('languages', language_flag)

    # Symlink the language's jar distributions/libraries into the native-image tree ...
    language_suite_depnames = language_entry[1]
    language_deps = [dep for dep in language_suite.dists + language_suite.libs if dep.name in language_suite_depnames]
    native_image_layout(language_deps, language_dir, native_image_root)

    # ... and extract the native distributions (resources) next to them.
    language_suite_nativedistnames = language_entry[2]
    language_nativedists = [dist for dist in language_suite.dists if dist.name in language_suite_nativedistnames]
    native_image_extract(language_nativedists, language_dir, native_image_root)

    # Provide native-image.properties: prefer the language suite's own file,
    # otherwise fall back to the generic per-language properties.
    option_properties = join(language_suite.mxDir, 'native-image.properties')
    target_path = join(native_image_root, language_dir, 'native-image.properties')
    if islink(target_path):
        # Remove a stale symlink before (re-)creating it.
        os.remove(target_path)
    if exists(option_properties):
        mx.logv('Add symlink to ' + str(option_properties))
        relsymlink(option_properties, target_path)
    else:
        native_image_option_properties('languages', language_flag, native_image_root)
    return language_suite
def fetch_jdk(args):
    """fetches required JDK version

    If mx is not passed the --quiet flag, a confirmation prompt is shown before
    installing. Downloads the archive (verified against its .sha1), extracts it
    on the same file system as the target so the result can be moved into place
    atomically, optionally keeps the archive, creates an alias symlink, and
    finally prints the shell command to set JAVA_HOME.
    """
    args = _parse_fetchsettings(args)

    distribution = args["java-distribution"]
    base_path = args["base-path"]
    artifact = distribution.get_folder_name()
    final_path = distribution.get_final_path(base_path)
    url = mx_urlrewrites.rewriteurl(distribution.get_url())
    sha_url = url + ".sha1"
    archive_name = distribution.get_archive_name()
    archive_target_location = join(base_path, archive_name)

    if not is_quiet():
        if not mx.ask_yes_no("Install {} to {}".format(artifact, final_path), default='y'):
            mx.abort("JDK installation canceled")
    if exists(final_path):
        if args["keep-archive"]:
            mx.warn("The --keep-archive option is ignored when the JDK is already installed.")
        mx.log("Requested JDK is already installed at {}".format(final_path))
    else:
        # Try to extract on the same file system as the target to be able to atomically move the result.
        with mx.TempDir(parent_dir=base_path) as temp_dir:
            mx.log("Fetching {} archive from {}...".format(artifact, url))
            archive_location = join(temp_dir, archive_name)
            mx._opts.no_download_progress = is_quiet()
            # Fetch the expected checksum first so the download can be verified.
            sha1_hash = mx._hashFromUrl(sha_url).decode('utf-8')
            mx.download_file_with_sha1(artifact, archive_location, [url], sha1_hash, archive_location + '.sha1', resolve=True, mustExist=True, sources=False)
            untar = mx.TarExtractor(archive_location)

            mx.log("Installing {} to {}...".format(artifact, final_path))

            extracted_path = join(temp_dir, 'extracted')
            try:
                untar.extract(extracted_path)
            except Exception:
                # Fix: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit and reported them as a broken
                # archive. Only genuine extraction errors are handled here.
                mx.rmtree(temp_dir, ignore_errors=True)
                mx.abort("Error parsing archive. Please try again")

            jdk_root_folder = get_extracted_jdk_archive_root_folder(extracted_path)
            if args["keep-archive"]:
                atomic_file_move_with_fallback(archive_location, archive_target_location)
                atomic_file_move_with_fallback(archive_location + '.sha1', archive_target_location + ".sha1")
                mx.log("Archive is located at {}".format(archive_target_location))

            atomic_file_move_with_fallback(join(extracted_path, jdk_root_folder), final_path)

    curr_path = final_path

    # On macOS the real JDK home is nested under Contents/Home; either strip
    # the wrapper directories or point final_path inside them.
    if mx.is_darwin() and exists(join(final_path, 'Contents', 'Home')):
        if args["strip-contents-home"]:
            with mx.TempDir() as tmp_path:
                shutil.move(final_path, join(tmp_path, 'jdk'))
                shutil.move(join(tmp_path, 'jdk', 'Contents', 'Home'), final_path)
        else:
            final_path = join(final_path, 'Contents', 'Home')

    if "alias" in args:
        alias_full_path = join(base_path, args["alias"])
        if os.path.islink(alias_full_path):
            os.unlink(alias_full_path)
        elif exists(alias_full_path):
            # Refuse to clobber a non-symlink path: it may be a real install.
            mx.abort(alias_full_path + ' exists and it is not an existing symlink so it can not be used for a new symlink. Please remove it manually.')

        if not (mx.is_windows() or mx.is_cygwin()):
            os.symlink(abspath(curr_path), alias_full_path)
        else:
            mx.copytree(curr_path, alias_full_path, symlinks=True)  # fallback for windows
        final_path = alias_full_path

    mx.log("Run the following to set JAVA_HOME in your shell:")
    # Idiom: use dict.get's default instead of a manual None check.
    shell = os.environ.get("SHELL", '')
    mx.log(get_setvar_format(shell) % ("JAVA_HOME", abspath(final_path)))