Example #1
def test_python_smoke(args):
    """
    Just a smoke test for now.
    """
    if len(args) != 1:
        mx.abort('mx svm_test_python <python_svm_image_path>')

    out = mx.OutputCapture()
    err = mx.OutputCapture()
    expected_output = "Hello from Python"
    with tempfile.NamedTemporaryFile(mode='w') as f:
        f.write("print('%s')\n" % expected_output)
        f.flush()
        os.system("ls -l %s" % args[0])
        os.system("ls -l %s" % f.name)
        exitcode = mx.run([args[0], f.name],
                          nonZeroIsFatal=False,
                          out=out,
                          err=err)
        if exitcode != 0:
            mx.abort("Python binary failed to execute: out=" + out.data +
                     " err=" + err.data)
        if out.data != expected_output + "\n":
            mx.abort("Python smoke test failed")
        mx.log("Python binary says: " + out.data)
Example #2
def run_mg_internal(args,
                    verbose=False,
                    extraVmArgs=None,
                    env=None,
                    jdk=None,
                    **kwargs):
    vmArgs, mgArgs = mx.extract_VM_args(args)
    # keep the extracted VM args and append the MegaGuards classpath and main class
    vmArgs = list(vmArgs) + ['-cp', mx.classpath(["edu.uci.megaguards"])]
    vmArgs.append("edu.uci.megaguards.shell.MGMain")
    if not jdk:
        jdk = get_jdk()
    out = mx.OutputCapture()
    _out = out if not verbose else mx.TeeOutputCapture(out)
    out_err = mx.OutputCapture()
    _out_err = out_err if not verbose else mx.TeeOutputCapture(out_err)
    n = 3
    for t in range(n):
        retcode = mx.run_java(vmArgs + mgArgs,
                              out=_out,
                              err=_out_err,
                              jdk=jdk,
                              **kwargs)
        if retcode == 0:
            break
    return out.data
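Examples #2, #5, and #6 all build the same verbose-aware capture pair by hand (and the originals wired `out` where `out_err` was meant, fixed above). A hypothetical helper that factors the pattern out, assuming only the mx.OutputCapture/mx.TeeOutputCapture API these snippets already use:

def _make_captures(verbose):
    """Return (out, err, out_sink, err_sink) for mx.run/mx.run_java calls."""
    out = mx.OutputCapture()
    err = mx.OutputCapture()
    if verbose:
        # TeeOutputCapture echoes to the console while still filling .data
        return out, err, mx.TeeOutputCapture(out), mx.TeeOutputCapture(err)
    return out, err, out, err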
Example #3
    def runTester(self):
        config = self.loadConfiguration(self.benchmarkName())
        wrkDirectory = self.getLibraryDirectory()
        if mx.get_os() == "linux":
            distro = "linux"
        elif mx.get_os() == "darwin":
            distro = "macos"
        else:
            mx.abort("{0} not supported in {1}.".format(
                BaseWrkBenchmarkSuite.__name__, mx.get_os()))

        wrkPath = os.path.join(wrkDirectory, "wrk-{os}".format(os=distro))
        wrkFlags = self.setupWrkCmd(config)

        warmupDuration = None
        if self.inNativeMode():
            warmupDuration = config.get("warmup-duration-native-image", None)
        elif "warmup-duration" in config:
            warmupDuration = config["warmup-duration"]
        if warmupDuration:
            warmupWrkCmd = [wrkPath] + ["--duration",
                                        str(warmupDuration)] + wrkFlags
            mx.log("Warming up with Wrk: {0}".format(warmupWrkCmd))
            warmupOutput = mx.TeeOutputCapture(mx.OutputCapture())
            mx.run(warmupWrkCmd, out=warmupOutput, err=warmupOutput)

        if "duration" in config:
            wrkFlags = ["--duration", str(config["duration"])] + wrkFlags

        runWrkCmd = [wrkPath] + wrkFlags
        mx.log("Running Wrk: {0}".format(runWrkCmd))
        self.testerOutput = mx.TeeOutputCapture(mx.OutputCapture())
        mx.run(runWrkCmd, out=self.testerOutput, err=self.testerOutput)
Example #4
def check_aot(classpath,
              main_class,
              common_opts,
              expected_output,
              lib_module,
              program_args=None):
    aot_opts = [
        '-XX:+UnlockDiagnosticVMOptions', '-XX:+UseAOTStrictLoading',
        '-XX:AOTLibrary=' + lib_module.name
    ]

    program_args = program_args or []

    # Check AOT library is loaded.
    out = mx.OutputCapture()
    mx_compiler.run_vm(common_opts + aot_opts + ['-XX:+PrintAOT', '-version'],
                       out=out,
                       err=out,
                       nonZeroIsFatal=False)
    if 'aot library' not in out.data:
        mx.abort(
            "Missing expected 'aot library' in -XX:+PrintAOT -version output. VM Output:\n"
            + str(out.data))

    # Run main_class+AOT modules and check output.
    aot_out = mx.OutputCapture()
    mx_compiler.run_vm(common_opts + aot_opts +
                       ['-cp', classpath, main_class] + program_args,
                       out=aot_out)

    if expected_output != aot_out.data:
        mx.abort('Outputs differ, expected `{}` != `{}`'.format(
            expected_output, aot_out.data))
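A hypothetical call site for check_aot, mirroring the flow of test_modules in Example #18 (all concrete values here are placeholders; run_jaotc and mktemp_libfile are the helpers those examples already use):

expected = mx.OutputCapture()
mx_compiler.run_vm(['-cp', 'app.jar', 'com.example.Main'], out=expected)
with mktemp_libfile() as lib_module:
    run_jaotc(['--module', 'java.base',
               '--exit-on-error', '--info', '--output', lib_module.name])
    check_aot('app.jar', 'com.example.Main', [], expected.data, lib_module)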
Example #5
def check_megaguards(device='GPU',
                     verbose=False,
                     cmd=['mx', 'python'],
                     testprogram='check_mg.py'):
    megaguards_opt = [
        '--mg-target=' + device.lower(), '--mg-log=eyxd',
        '--mg-target-threshold=1'
    ]
    check_python_program = [
        _suite.dir + os.sep + 'tests' + os.sep + testprogram
    ]
    n = 3
    for t in range(n):
        out = mx.OutputCapture()
        _out = out if not verbose else mx.TeeOutputCapture(out)
        out_err = mx.OutputCapture()
        _out_err = out_err if not verbose else mx.TeeOutputCapture(out_err)
        print_progress('Testing OpenCL device %s accessibility' % device)
        retcode = mx.run(cmd + check_python_program + megaguards_opt,
                         out=_out,
                         err=_out_err,
                         nonZeroIsFatal=False)
        if retcode == 0:
            break
        else:
            print("Execution failed.. retry %d of %d" % (t + 1, n))

    successRe = r"Execution Target:.+" + device + r""
    if not re.search(successRe, out.data, re.MULTILINE):
        print_error(opencl_err.format(device))
        return False
    else:
        print_ok("OpenCL device {0} has been detected!".format(device))
        return True
Example #6
def get_megaguards_junit_status(verbose=False):
    is_ok = get_megaguards_home_dir(check_only=True)
    is_ok = is_ok and get_megaguards_build_dir(check_only=True)
    is_ok = is_ok and get_megaguards_polyhedral_ld(check_only=True)
    is_ok = is_ok and get_megaguards_test_dataset(check_only=True)
    if is_ok:
        n = 3
        for t in range(n):
            out = mx.OutputCapture()
            _out = out if not verbose else mx.TeeOutputCapture(out)
            out_err = mx.OutputCapture()
            _out_err = out_err if not verbose else mx.TeeOutputCapture(out_err)
            print_progress(
                "Performing MegaGuards (core) junit tests.. (note: run 'mx junit-mg' for complete MegaGuards junit tests)"
            )
            retcode = mx.run(['mx', 'junit-mg-core'],
                             out=_out,
                             err=_out_err,
                             nonZeroIsFatal=False)
            if retcode == 0:
                break
            else:
                print_progress("Test failed.. retry %d of %d" % (t + 1, n))
        if retcode == 0:
            print_ok('MegaGuards core junit tests')
        else:
            print_warn('MegaGuards core junit tests encountered some errors.')

        is_ok = is_ok and retcode == 0

    return is_ok
Example #7
def _vm_home(config):
    if config not in _vm_homes:
        # get things initialized (e.g., cloning)
        _mx_vm(['graalvm-home'], config, out=mx.OutputCapture())
        capture = mx.OutputCapture()
        _mx_vm(['graalvm-home'], config, out=capture, quiet=True)
        _vm_homes[config] = capture.data.strip()
    return _vm_homes[config]
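_vm_home memoizes per-config results in a module-level dict, running the command once noisily to trigger initialization (e.g., cloning) and once quietly for a clean capture. When the config value is hashable, the caching half of the pattern could equally be written with functools.lru_cache; a simplified sketch that drops the separate initialization run:

import functools

@functools.lru_cache(maxsize=None)
def _vm_home_cached(config):
    # every later call with the same config returns the cached string
    capture = mx.OutputCapture()
    _mx_vm(['graalvm-home'], config, out=capture, quiet=True)
    return capture.data.strip()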
Example #8
 def benchmark_callback(suite, commit):
     suite.vc.update_to_branch(suite.vc_dir, commit)
     mx.run_mx(['sforceimports'], suite=suite)
     mx.run_mx(['--env', 'ce', 'sforceimports'], suite=get_suite('/vm'))
     if args.enterprise and suite.name != 'vm-enterprise':
         checkout_args = [
             '--dynamicimports', '/vm-enterprise', 'checkout-downstream',
             'vm', 'vm-enterprise'
         ]
         if fetched_enterprise[0]:
             checkout_args.append('--no-fetch')
         mx.run_mx(checkout_args, out=mx.OutputCapture())
         # Make sure vm is imported before vm-enterprise
         get_suite('/vm')
         mx.run_mx(['--env', 'ee', 'sforceimports'],
                   suite=get_suite('/vm-enterprise'))
         fetched_enterprise[0] = True
     suite.vc.update_to_branch(suite.vc_dir, commit)
     mx.run_mx(['sforceimports'], suite=suite)
     debug_str = "debug: graalpython={} graal={}".format(
         get_commit(get_suite('graalpython')), get_commit(get_suite('/vm')))
     if args.enterprise:
         debug_str += " graal-enterprise={}".format(
             get_commit(get_suite('/vm-enterprise')))
     print(debug_str)
     build_command = shlex.split(args.build_command)
     if not args.no_clean:
         try:
             clean_command = build_command[:build_command.index('build')] + ['clean']
             retcode = mx.run(clean_command, nonZeroIsFatal=False)
             if retcode:
                 print("Warning: clean command failed")
         except ValueError:
             pass
     retcode = mx.run(build_command, nonZeroIsFatal=False)
     if retcode:
         raise RuntimeError(
             "Failed to execute the build command for {}".format(commit))
     output = mx.OutputCapture()
     retcode = mx.run(shlex.split(args.benchmark_command),
                      out=mx.TeeOutputCapture(output),
                      nonZeroIsFatal=False)
     if retcode:
         raise RuntimeError(
             "Failed to execute benchmark for {}".format(commit))
     match = re.search(
         r'{}.*duration: ([\d.]+)'.format(
             re.escape(args.benchmark_criterion)), output.data)
     if not match:
         raise RuntimeError("Failed to get result from the benchmark")
     return float(match.group(1))
Example #9
 def benchmarks(self):
     out = mx.OutputCapture()
     jt(['where', 'repos', 'all-ruby-benchmarks'], out=out)
     all_ruby_benchmarks = out.data.strip()
     benchmarks = []
     for root, dirs, files in os.walk(os.path.join(all_ruby_benchmarks, 'micro')):
         for name in files:
             if name.endswith('.rb'):
                 benchmark_file = os.path.join(root, name)[len(all_ruby_benchmarks)+1:]
                 out = mx.OutputCapture()
                 if jt(['benchmark', 'list', benchmark_file], out=out):
                     benchmarks.extend([benchmark_file + ':' + b.strip() for b in out.data.split('\n') if len(b.strip()) > 0])
                 else:
                     sys.stderr.write(out.data)
     return benchmarks
Example #10
def checkListingExclusions(exclusions, expected):
    parent_dir = os.path.normpath(file_dir + sep + "..")
    out = mx.TeeOutputCapture(mx.OutputCapture())
    env = os.environ.copy()
    env["MX_PRIMARY_SUITE_PATH"] = parent_dir
    mx_bin = os.path.normpath(parent_dir + sep + "mx")
    mx.run([
        mx_bin, 'benchmark', 'jmh-dist:MX_MICRO_BENCHMARKS', '--', '--', '-l'
    ] + exclusions,
           out=out,
           env=env,
           cwd=parent_dir)

    # Extract benchmark names from the output.
    benchmarks = []
    start = re.compile("Benchmarks:")
    end = re.compile(r"\d+ benchmark data points dumped")
    collecting = False
    for line in out.underlying.data.splitlines():
        if start.match(line):
            collecting = True
        elif end.match(line):
            collecting = False
            break
        elif collecting:
            # Collect unqualified name.
            benchmarks.append(line.split('.')[-1])

    if set(benchmarks) != set(expected):
        mx.abort(
            f"Filtering benchmarks with {exclusions} gave {benchmarks}, expected {expected}"
        )
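The start/end scan in checkListingExclusions is a small state machine for slicing a region out of captured output; as a standalone generator it is easy to unit-test (a sketch, not part of the original suite):

import re

def extract_region(text, start_pat, end_pat):
    """Yield the lines strictly between the first start and end matches."""
    start, end = re.compile(start_pat), re.compile(end_pat)
    collecting = False
    for line in text.splitlines():
        if start.match(line):
            collecting = True
        elif collecting:
            if end.match(line):
                break
            yield line

# e.g.: list(extract_region(out.underlying.data,
#                           "Benchmarks:", r"\d+ benchmark data points dumped"))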
Example #11
 def __exit__(self, exc_type, exc_value, traceback):
     if self.mapFiles:
         try:
             with tempfile.NamedTemporaryFile(mode='w') as inputFile:
                 with tempfile.NamedTemporaryFile(mode='w') as mapFile:
                     if len(self.capture.data) != 0:
                         inputFile.write(self.capture.data)
                         inputFile.flush()
                         for e in self.mapFiles:
                             with open(e, 'r') as m:
                                 shutil.copyfileobj(m, mapFile)
                                 mapFile.flush()
                         retraceOut = mx.OutputCapture()
                         proguard_cp = mx.classpath(
                             ['PROGUARD_RETRACE', 'PROGUARD'])
                         mx.run([
                             jdk.java, '-cp', proguard_cp,
                             'proguard.retrace.ReTrace', mapFile.name,
                             inputFile.name
                         ],
                                out=retraceOut)
                         if self.capture.data != retraceOut.data:
                             mx.log('>>>> BEGIN UNSTRIPPED OUTPUT')
                             mx.log(retraceOut.data)
                             mx.log('<<<< END UNSTRIPPED OUTPUT')
         except BaseException as e:
             mx.log(
                 'Error unstripping output from VM execution with stripped jars: '
                 + str(e))
     return None
Example #12
def generate_llvm_config(args=None, **kwargs):

    constants = []

    # get config full string
    out = mx.OutputCapture()
    mx_sulong.llvm_tool(["llvm-config", "--version"] + list(args), out=out)
    full_version = out.data.strip()
    # NOTE: do not add full version until we need it to avoid regeneration
    # constants.append(("VERSION_FULL", full_version, "Full LLVM version string."))
    # version without suffix
    s = full_version.split("-", 3)
    version = s[0]
    constants.append(("VERSION", version, "LLVM version string."))
    # major, minor, patch
    s = version.split(".", 3)
    major_version, minor_version, patch_version = s[0], s[1], s[2]
    constants.append(("VERSION_MAJOR", int(major_version),
                      "Major version of the LLVM API."))
    constants.append(("VERSION_MINOR", int(minor_version),
                      "Minor version of the LLVM API."))
    constants.append(("VERSION_PATCH", int(patch_version),
                      "Patch version of the LLVM API."))

    file_comment = "GENERATED BY 'mx {}'. DO NOT MODIFY.".format(
        GENERATE_LLVM_CONFIG)

    _write_llvm_config_java(constants, file_comment)
    _write_llvm_config_mx(constants, file_comment)
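The split logic above assumes LLVM reports `major.minor.patch[-suffix]`; a compact sketch that makes the contract explicit and checks itself:

def parse_llvm_version(full_version):
    """E.g. '14.0.6-4ubuntu1' -> (14, 0, 6); assumes three dotted components."""
    version = full_version.split("-", 1)[0]
    major, minor, patch = (int(p) for p in version.split(".", 2))
    return major, minor, patch

assert parse_llvm_version("14.0.6-4ubuntu1") == (14, 0, 6)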
Example #13
def _find_classes_by_annotated_methods(annotations, suite):
    """
    Scan distributions from binary suite dependencies for classes that contain at
    least one method with an annotation from 'annotations', and return a dictionary
    mapping fully qualified class names to the distribution containing the class.
    """
    binarySuiteDists = [
        d for d in mx.dependencies(opt_limit_to_suite=True)
        if d.isJARDistribution() and isinstance(d.suite, mx.BinarySuite) and (
            not suite or suite == d.suite)
    ]
    if len(binarySuiteDists) != 0:
        # Ensure Java support class is built
        mx.build(['--dependencies', 'com.oracle.mxtool.junit'])

        # Create map from jar file to the binary suite distribution defining it
        jars = {d.classpath_repr(): d for d in binarySuiteDists}

        cp = mx.classpath(['com.oracle.mxtool.junit'] +
                          [d.name for d in binarySuiteDists])
        out = mx.OutputCapture()
        mx.run_java(['-cp', cp] +
                    ['com.oracle.mxtool.junit.FindClassesByAnnotatedMethods'] +
                    annotations + list(jars.keys()),
                    out=out)
        candidates = {}
        for line in out.data.strip().split('\n'):
            name, jar = line.split(' ')
            # Record class name to the binary suite distribution containing it
            candidates[name] = jars[jar]
        return candidates
    return {}
Example #14
def gate(args, tasks):
    with Task('Vm: Basic GraalVM Tests', tasks, tags=[VmGateTasks.graal]) as t:
        if t:
            _java = join(mx_vm.graalvm_output(), 'bin', 'java')

            _out = mx.OutputCapture()
            if mx.run([_java, '-XX:+JVMCIPrintProperties'], nonZeroIsFatal=False, out=_out, err=_out):
                mx.log_error(_out.data)
                mx.abort('The GraalVM image is not built with a JVMCI-enabled JDK, it misses `-XX:+JVMCIPrintProperties`.')

            _out = subprocess.check_output([_java, '-version'], stderr=subprocess.STDOUT).decode()
            if args.strict_mode:
                # A full open-source build should be built with an open-source JDK
                _version_regex = _openjdk_version_regex
            else:
                # Allow Oracle JDK in non-strict mode as it is common on developer machines
                _version_regex = _anyjdk_version_regex
            match = _version_regex.match(_out)
            if match is None:
                if args.strict_mode and _anyjdk_version_regex.match(_out):
                    mx.abort("In --strict-mode, GraalVM must be built with OpenJDK")
                else:
                    mx.abort('Unexpected version string:\n{}Does not match:\n{}'.format(_out, _version_regex.pattern))
            elif match.group('graalvm_version') != _suite.release_version():
                mx.abort("Wrong GraalVM version in -version string: got '{}', expected '{}'".format(match.group('graalvm_version'), _suite.release_version()))

    if mx_vm.has_component('js'):
        with Task('Vm: Graal.js tests', tasks, tags=[VmGateTasks.graal_js]) as t:
            if t:
                pass

    gate_sulong(tasks)
    gate_ruby(tasks)
Example #15
def python_svm(args):
    mx.run_mx(_SVM_ARGS + ["build"])
    out = mx.OutputCapture()
    mx.run_mx(_SVM_ARGS + ["graalvm-home"], out=mx.TeeOutputCapture(out))
    svm_image = os.path.join(out.data.strip(), "bin", "graalpython")
    mx.run([svm_image] + args)
    return svm_image
Example #16
    def runBenchmark(self, benchmark, bmSuiteArgs):
        out = mx.OutputCapture()

        jt(['metrics', 'time', '--json'] + metrics_benchmarks[benchmark] +
           bmSuiteArgs,
           out=out)

        data = json.loads(out.data)

        return [{
            'benchmark': benchmark,
            'extra.metric.region': region,
            'metric.name': 'time',
            'metric.value': sample,
            'metric.unit': 's',
            'metric.better': 'lower',
            'metric.iteration': n,
            'extra.metric.human': '%d/%d %s' % (n, len(region_data['samples']), region_data['human'])
        } for region, region_data in data.items()
                for n, sample in enumerate(region_data['samples'])]
Example #17
    def runBenchmark(self, benchmark, bmSuiteArgs):
        out = mx.OutputCapture()

        jt(['metrics', 'time', '--json'] + metrics_benchmarks[benchmark] +
           bmSuiteArgs,
           out=out)

        lines = [line for line in out.data.split('\n') if len(line) > 0]
        mx.log('\n'.join(lines[0:-1]))

        json_data = lines[-1]
        mx.log('JSON:')
        mx.log(json_data)
        data = json.loads(json_data)

        return [{
            'benchmark': benchmark,
            'extra.metric.region': region,
            'metric.name': 'time',
            'metric.value': sample,
            'metric.unit': 'ms',
            'metric.better': 'lower',
            'metric.iteration': n
        } for region, region_data in data.items()
                for n, sample in enumerate(region_data['samples'])]
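The nested comprehension flattens a {region: {'samples': [...]}} mapping into one datapoint per sample; with toy data the shape is easier to see:

data = {"load": {"samples": [1.2, 1.3]}, "run": {"samples": [9.8]}}
points = [
    {"region": region, "iteration": n, "value": sample}
    for region, region_data in data.items()
    for n, sample in enumerate(region_data["samples"])
]
# -> three datapoints: load/0/1.2, load/1/1.3, run/0/9.8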
Example #18
def test_modules(classpath, main_class, modules, program_args=None):
    """(jaotc-)Compiles `modules` and runs `main_class` + AOT library.
    Compares the output vs. standard JVM.
    """
    # Run on vanilla JVM.
    program_args = program_args or []
    expected_out = mx.OutputCapture()

    mx_compiler.run_vm((['-cp', classpath] if classpath else []) +
                       [main_class] + program_args,
                       out=expected_out)

    # jaotc uses ':' as separator.
    module_list = ':'.join(modules)

    for common_opts in common_opts_variants:
        mx.log('(jaotc) Compiling module(s) {} with {}'.format(
            module_list, ' '.join(common_opts)))
        with mktemp_libfile() as lib_module:
            run_jaotc(
                ['-J' + opt
                 for opt in common_opts] + ['--module', module_list] +
                ['--exit-on-error', '--info', '--output', lib_module.name])

            check_aot(classpath, main_class, common_opts, expected_out.data,
                      lib_module, program_args)
Example #19
def jdk_omits_warning_for_jlink_set_ThreadPriorityPolicy(jdk):  # pylint: disable=invalid-name
    """
    Determines if the `jdk` suppresses a warning about ThreadPriorityPolicy when it
    is non-zero if the value is set from the jimage.
    https://bugs.openjdk.java.net/browse/JDK-8235908.
    """
    if not hasattr(jdk, '.omits_ThreadPriorityPolicy_warning'):
        out = mx.OutputCapture()
        sink = lambda x: x
        tmpdir = tempfile.mkdtemp(
            prefix='jdk_omits_warning_for_jlink_set_ThreadPriorityPolicy')
        jlink_exe = jdk.javac.replace('javac', 'jlink')
        mx.run([
            jlink_exe, '--add-options=-XX:ThreadPriorityPolicy=1',
            '--output=' + join(tmpdir, 'jdk'), '--add-modules=java.base'
        ])
        mx.run([mx.exe_suffix(join(tmpdir, 'jdk', 'bin', 'java')), '-version'],
               out=sink,
               err=out)
        shutil.rmtree(tmpdir)
        setattr(
            jdk, '.omits_ThreadPriorityPolicy_warning',
            '-XX:ThreadPriorityPolicy=1 may require system level permission'
            not in out.data)
    return getattr(jdk, '.omits_ThreadPriorityPolicy_warning')
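Caching the probe result on the jdk object itself, under a dotted name that cannot collide with a real attribute, is a handy once-per-object memoization; the generic shape, as a sketch:

def once_per_object(obj, key, compute):
    # a key containing '.' is not a valid identifier, so it cannot shadow
    # a normal attribute; hasattr/setattr/getattr still handle it fine
    if not hasattr(obj, key):
        setattr(obj, key, compute())
    return getattr(obj, key)

# e.g.: once_per_object(jdk, '.omits_ThreadPriorityPolicy_warning',
#                       lambda: run_probe(jdk))  # run_probe is hypothetical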
Example #20
    def _run(self, *args, **kwargs):
        cmd = [self.binary, '-j', self.parallelism]
        if mx.get_opts().very_verbose:
            cmd += ['-v']
        cmd += args

        out = kwargs.get('out', mx.OutputCapture())
        err = kwargs.get('err', subprocess.STDOUT)
        if mx.get_opts().verbose:
            if callable(out) and '-n' not in args:
                out = mx.TeeOutputCapture(out)
            if callable(err):
                err = mx.TeeOutputCapture(err)

        try:
            rc = mx.run(cmd,
                        nonZeroIsFatal=False,
                        out=out,
                        err=err,
                        cwd=self.build_dir)
        except OSError as e:
            if e.errno != errno.EACCES:
                mx.abort('Error executing \'{}\': {}'.format(
                    ' '.join(cmd), str(e)))
            mx.logv(
                '{} is not executable. Trying to change permissions...'.format(
                    self.binary))
            os.chmod(self.binary, 0o755)
            self._run(*args, **kwargs)  # retry
        else:
            not rc or mx.abort(rc if mx.get_opts().verbose else out.data)  # pylint: disable=expression-not-assigned
Example #21
 def ext_version(self):
     out1 = mx.OutputCapture()
     out2 = mx.OutputCapture()
     mx.run([self.externalInterpreter(), "--version"],
            err=mx.TeeOutputCapture(out2),
            out=mx.TeeOutputCapture(out1))
     out1 = [] if not out1 or len(out1.data) <= 1 else out1.data.split("\n")
     out1 = [] if not out1 or len(out1) <= 1 else out1[0].split(" ")
     out2 = [] if not out2 or len(out2.data) <= 1 else out2.data.split("\n")
     out2 = [] if not out2 or len(out2) <= 1 else out2[0].split(" ")
     if len(out1) > 1:
         return out1[out1.index("Python") + 1].replace('-', '_')
     elif len(out2) > 1:
         return out2[out2.index("Python") + 1].replace('-', '_')
     else:
         return "unknown"
Example #22
def test_modules(opts_set, classpath, main_class, modules, vm_args, program_args, commands):
    """(jaotc-)Compiles `modules` and runs `main_class` + AOT library.
    Compares the output vs. standard JVM.
    """
    # Run on vanilla JVM.
    program_args = program_args or []
    vm_args = vm_args or []
    commands = commands or ''
    expected_out = mx.OutputCapture()

    mx_compiler.run_vm((['-cp', classpath] if classpath else []) +
                       vm_args +
                       [main_class] + program_args, out=expected_out)

    # jaotc uses ':' as separator.
    module_list = ':'.join(modules)

    for common_opts in opts_set:
        mx.log('(jaotc) Compiling module(s) {} with {}'.format(module_list, ' '.join(common_opts)))
        with mktemp_libfile() as lib_module:
            lib_module.file.close()
            with tempfile.NamedTemporaryFile(mode='w', prefix='cmds_', suffix='.txt') as cmd_file:
                cmd_file.write(commands)
                cmd_file.file.close()
                run_jaotc(['-J' + opt for opt in common_opts] +
                          ['--module', module_list] +
                          ['--compile-commands', cmd_file.name] +
                          ['--exit-on-error', '--info', '--output', lib_module.name])

            check_aot(classpath, main_class, common_opts, expected_out.data, lib_module, program_args)
Example #23
 def benchmark_callback(suite, commit):
     suite.vc.update_to_branch(suite.vc_dir, commit)
     mx.run_mx(['sforceimports'], suite=suite)
     mx.run_mx(['--env', 'ce', 'sforceimports'], suite=get_suite('/vm'))
     if args.enterprise and suite.name != 'vm-enterprise':
         checkout_args = [
             '--dynamicimports', '/vm-enterprise', 'checkout-downstream',
             'vm', 'vm-enterprise'
         ]
         if fetched_enterprise[0]:
             checkout_args.append('--no-fetch')
         mx.run_mx(checkout_args, out=mx.OutputCapture())
         # Make sure vm is imported before vm-enterprise
         get_suite('/vm')
         mx.run_mx(['--env', 'ee', 'sforceimports'],
                   suite=get_suite('/vm-enterprise'))
         fetched_enterprise[0] = True
     suite.vc.update_to_branch(suite.vc_dir, commit)
     mx.run_mx(['sforceimports'], suite=suite)
     debug_str = "debug: graalpython={} graal={}".format(
         get_commit(get_suite('graalpython')), get_commit(get_suite('/vm')))
     if args.enterprise:
         debug_str += " graal-enterprise={}".format(
             get_commit(get_suite('/vm-enterprise')))
     print(debug_str)
     env = os.environ.copy()
     env['MX_ALT_OUTPUT_ROOT'] = 'mxbuild-{}'.format(commit)
     retcode = mx.run(shlex.split(args.build_command),
                      env=env,
                      nonZeroIsFatal=False)
     if retcode:
         raise RuntimeError(
             "Failed to execute the build command for {}".format(commit))
     output = mx.OutputCapture()
     retcode = mx.run(shlex.split(args.benchmark_command),
                      env=env,
                      out=mx.TeeOutputCapture(output),
                      nonZeroIsFatal=False)
     if retcode:
         raise RuntimeError(
             "Failed to execute benchmark for {}".format(commit))
     match = re.search(
         r'{}.*duration: ([\d.]+)'.format(
             re.escape(args.benchmark_criterion)), output.data)
     if not match:
         raise RuntimeError("Failed to get result from the benchmark")
     return float(match.group(1))
Example #24
def _test_libgraal_fatal_error_handling():
    """
    Tests that fatal errors in libgraal route back to HotSpot fatal error handling.
    """
    vmargs = [
        '-XX:+PrintFlagsFinal', '-Dlibgraal.CrashAt=length,hashCode',
        '-Dlibgraal.CrashAtIsFatal=true'
    ]
    cmd = ["dacapo:avrora", "--tracker=none", "--"
           ] + vmargs + ["--", "--preserve"]
    out = mx.OutputCapture()
    exitcode, bench_suite, _ = mx_benchmark.gate_mx_benchmark(
        cmd, out=out, err=out, nonZeroIsFatal=False)
    if exitcode == 0:
        if 'CrashAtIsFatal: no fatalError function pointer installed' in out.data:
            # Executing a VM that does not configure fatal errors handling
            # in libgraal to route back through the VM.
            pass
        else:
            mx.abort('Expected benchmark to result in non-zero exit code: ' +
                     ' '.join(cmd) + linesep + out.data)
    else:
        if len(bench_suite.scratchDirs()) == 0:
            mx.abort("No scratch dir found despite error being expected!")
        latest_scratch_dir = bench_suite.scratchDirs()[-1]
        seen_libjvmci_log = False
        hs_errs = glob.glob(join(latest_scratch_dir, 'hs_err_pid*.log'))
        if not hs_errs:
            mx.abort(
                'Expected a file starting with "hs_err_pid" in test directory. Entries found='
                + str(listdir(latest_scratch_dir)))

        for hs_err in hs_errs:
            mx.log("Verifying content of {}".format(
                join(latest_scratch_dir, hs_err)))
            with open(join(latest_scratch_dir, hs_err)) as fp:
                contents = fp.read()
            if 'libjvmci' in hs_err:
                seen_libjvmci_log = True
                if 'Fatal error: Forced crash' not in contents:
                    mx.abort(
                        'Expected "Fatal error: Forced crash" to be in contents of '
                        + hs_err + ':' + linesep + contents)
            else:
                if 'Fatal error in JVMCI' not in contents:
                    mx.abort(
                        'Expected "Fatal error in JVMCI" to be in contents of '
                        + hs_err + ':' + linesep + contents)

        if 'JVMCINativeLibraryErrorFile' in out.data and not seen_libjvmci_log:
            mx.abort(
                'Expected a file matching "hs_err_pid*_libjvmci.log" in test directory. Entries found='
                + str(listdir(latest_scratch_dir)))

    # Only clean up scratch dir on success
    for scratch_dir in bench_suite.scratchDirs():
        mx.log("Cleaning up scratch dir after gate task completion: {}".format(
            scratch_dir))
        mx.rmtree(scratch_dir)
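The hs_err discovery step can be isolated into a helper, which also makes the libjvmci/HotSpot split explicit (a sketch under the same file-naming assumptions as the test above):

import glob
import os

def find_crash_logs(scratch_dir):
    """Return (libjvmci_logs, hotspot_logs) found in scratch_dir."""
    logs = glob.glob(os.path.join(scratch_dir, 'hs_err_pid*.log'))
    libjvmci = [p for p in logs if 'libjvmci' in os.path.basename(p)]
    return libjvmci, [p for p in logs if p not in libjvmci]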
Example #25
 def run(self, cwd, args):
     _check_vm_args(self.name(), args)
     out = mx.OutputCapture()
     stdout_capture = mx.TeeOutputCapture(out)
     ret_code = mx.run([self.interpreter] + args,
                       out=stdout_capture,
                       err=stdout_capture)
     return ret_code, out.data
Example #26
 def run(self, cwd, args):
     out = mx.TeeOutputCapture(mx.OutputCapture())
     args = self.post_process_command_line_args(args)
     mx.log("Running JVM with args: {0}".format(args))
     code = self.run_java(args, out=out, err=out, cwd=cwd, nonZeroIsFatal=False)
     out = out.underlying.data
     dims = self.dimensions(cwd, args, code, out)
     return code, out, dims
Example #27
def _gate_python_benchmarks_tests(name, iterations):
    run_java = mx.run_java
    vmargs += ['-cp', mx.classpath(["com.oracle.graal.python"]), "com.oracle.graal.python.shell.GraalPythonMain", name, str(iterations)]
    success_pattern = re.compile(r"^(?P<benchmark>[a-zA-Z0-9.\-]+): (?P<score>[0-9]+(\.[0-9]+)?$)")
    out = mx.OutputCapture()
    run_java(vmargs, out=mx.TeeOutputCapture(out), err=subprocess.STDOUT)
    if not success_pattern.search(out.data):
        mx.abort('Benchmark "' + name + '" doesn\'t match success pattern: ' + str(success_pattern))
Example #28
 def testPeakPerformance(self, warmup):
     jmeterDirectory = mx.library("APACHE_JMETER_" + self.jmeterVersion(), True).get_path(True)
     jmeterPath = os.path.join(jmeterDirectory, "apache-jmeter-" + self.jmeterVersion(), "bin/ApacheJMeter.jar")
     jmeterCmd = [mx.get_jdk().java, "-jar", jmeterPath, "-n", "-t", self.workloadConfigurationPath(), "-j", "/dev/stdout"] # pylint: disable=line-too-long
     mx.log("Running JMeter: {0}".format(jmeterCmd))
     output = mx.TeeOutputCapture(mx.OutputCapture())
     mx.run(jmeterCmd, out=output, err=output)
     self.peakOutput = output.underlying.data
Example #29
def _ctw_system_properties_suffix():
    out = mx.OutputCapture()
    out.data = 'System properties for CTW:\n\n'
    args = ['-XX:+EnableJVMCI'] + _ctw_jvmci_export_args()
    args.extend(['-cp', mx.classpath('org.graalvm.compiler.hotspot.test', jdk=jdk),
            '-DCompileTheWorld.Help=true', 'org.graalvm.compiler.hotspot.test.CompileTheWorld'])
    run_java(args, out=out, addDefaultArgs=False)
    return out.data
Example #30
    def benchmarkList(self, _):
        jdk = mx.get_jdk(mx.distribution(BENCHMARKCASES_DISTRIBUTION).javaCompliance)
        jvm_args = mx.get_runtime_jvm_args([BENCHMARKCASES_DISTRIBUTION], jdk=jdk)
        args = jvm_args + [MEMORY_PROFILER_CLASS_NAME, "--list"]

        out = mx.OutputCapture()
        jdk.run_java(args, out=out)
        return out.data.split()