Example 1
def _test_libgraal_fatal_error_handling():
    """
    Tests that fatal errors in libgraal route back to HotSpot fatal error handling.
    """
    vmargs = [
        '-XX:+PrintFlagsFinal', '-Dlibgraal.CrashAt=length,hashCode',
        '-Dlibgraal.CrashAtIsFatal=true'
    ]
    cmd = ["dacapo:avrora", "--tracker=none", "--"
           ] + vmargs + ["--", "--preserve"]
    out = mx.OutputCapture()
    exitcode, bench_suite, _ = mx_benchmark.gate_mx_benchmark(
        cmd, out=out, err=out, nonZeroIsFatal=False)
    if exitcode == 0:
        if 'CrashAtIsFatal: no fatalError function pointer installed' in out.data:
            # The VM under test does not configure fatal error handling in
            # libgraal to route back through the VM, so a zero exit code is
            # expected here.
            pass
        else:
            mx.abort('Expected the benchmark to result in a non-zero exit code: ' +
                     ' '.join(cmd) + linesep + out.data)
    else:
        if len(bench_suite.scratchDirs()) == 0:
            mx.abort("No scratch dir found despite error being expected!")
        latest_scratch_dir = bench_suite.scratchDirs()[-1]
        seen_libjvmci_log = False
        hs_errs = glob.glob(join(latest_scratch_dir, 'hs_err_pid*.log'))
        if not hs_errs:
            mx.abort(
                'Expected a file starting with "hs_err_pid" in test directory. Entries found='
                + str(listdir(latest_scratch_dir)))

        for hs_err in hs_errs:
            mx.log("Verifying content of {}".format(
                join(latest_scratch_dir, hs_err)))
            with open(join(latest_scratch_dir, hs_err)) as fp:
                contents = fp.read()
            if 'libjvmci' in hs_err:
                seen_libjvmci_log = True
                if 'Fatal error: Forced crash' not in contents:
                    mx.abort(
                        'Expected "Fatal error: Forced crash" to be in contents of '
                        + hs_err + ':' + linesep + contents)
            else:
                if 'Fatal error in JVMCI' not in contents:
                    mx.abort(
                        'Expected "Fatal error in JVMCI" to be in contents of '
                        + hs_err + ':' + linesep + contents)

        if 'JVMCINativeLibraryErrorFile' in out.data and not seen_libjvmci_log:
            mx.abort(
                'Expected a file matching "hs_err_pid*_libjvmci.log" in test directory. Entries found='
                + str(listdir(latest_scratch_dir)))

    # Only clean up scratch dir on success
    for scratch_dir in bench_suite.scratchDirs():
        mx.log("Cleaning up scratch dir after gate task completion: {}".format(
            scratch_dir))
        mx.rmtree(scratch_dir)
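
The snippet below is a hedged wiring sketch, not part of the source: it shows how a helper like this could be invoked from an mx gate runner, mirroring the Task/tags idiom that appears in Example 3. The task name is illustrative.

# Hypothetical wiring sketch (not from the source): inside a gate runner such
# as gate_body(args, tasks), the helper could run as its own task, following
# the Task/tags pattern shown in Example 3.
with Task('LibGraal Compiler:FatalErrorHandling', tasks,
          tags=[VmGateTasks.libgraal]) as t:
    if t:
        _test_libgraal_fatal_error_handling()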
Example 2
def check(command, included, excluded):
    mx.log("mx benchmark " + command)

    exit_code, _, results = gate_mx_benchmark([command, '--tracker', 'none'],
                                              nonZeroIsFatal=False)

    if exit_code != 0:
        mx.abort("{} exit code was {}".format(command, exit_code))

    executed_benchmarks = set([point["benchmark"] for point in results])

    for test_bench in included:
        if test_bench not in executed_benchmarks:
            mx.abort("The expected test " + test_bench +
                     " was not executed. Executed benchmarks: " +
                     str(executed_benchmarks))

    for executed_bench in executed_benchmarks:
        if executed_bench in excluded:
            mx.abort("The test " + executed_bench +
                     " is in the excluded list: " + str(excluded))
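
A minimal usage sketch, assuming hypothetical benchmark names (the excluded names are examples only; "dacapo:avrora" is the selector used in the other examples):

# Hypothetical call: running a single DaCapo benchmark should report "avrora"
# in the results and must not have executed anything from the excluded list.
check("dacapo:avrora", included=["avrora"], excluded=["h2", "jython"])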
Example 3
def gate_body(args, tasks):
    with Task('Vm: GraalVM dist names', tasks, tags=['names']) as t:
        if t:
            mx_sdk_vm.verify_graalvm_configs(suites=['vm', 'vm-enterprise'])

    with Task('Vm: Basic GraalVM Tests', tasks,
              tags=[VmGateTasks.compiler]) as t:
        if t and mx_sdk_vm_impl.has_component('GraalVM compiler'):
            # 1. the build must be a GraalVM
            # 2. the build must be JVMCI-enabled since the 'GraalVM compiler' component is registered
            mx_sdk_vm_impl.check_versions(
                mx_sdk_vm_impl.graalvm_output(),
                graalvm_version_regex=mx_sdk_vm_impl.graalvm_version_regex,
                expect_graalvm=True,
                check_jvmci=True)

    if mx_sdk_vm_impl.has_component('LibGraal'):
        libgraal_location = mx_sdk_vm_impl.get_native_image_locations(
            'LibGraal', 'jvmcicompiler')
        if libgraal_location is None:
            mx.warn(
                "Skipping libgraal tests: no library enabled in the LibGraal component"
            )
        else:
            extra_vm_arguments = [
                '-XX:+UseJVMCICompiler', '-XX:+UseJVMCINativeLibrary',
                '-XX:JVMCILibPath=' + dirname(libgraal_location)
            ]
            if args.extra_vm_argument:
                extra_vm_arguments += args.extra_vm_argument
            import mx_compiler

            # run avrora on the GraalVM binary itself
            with Task('LibGraal Compiler:GraalVM DaCapo-avrora',
                      tasks,
                      tags=[VmGateTasks.libgraal]) as t:
                if t:
                    java_exe = join(mx_sdk_vm_impl.graalvm_home(), 'bin',
                                    'java')
                    mx.run([
                        java_exe, '-XX:+UseJVMCICompiler',
                        '-XX:+UseJVMCINativeLibrary', '-jar',
                        mx.library('DACAPO').get_path(True), 'avrora', '-n',
                        '1'
                    ])

                    # Ensure that fatal errors in libgraal route back to HotSpot
                    vmargs = [
                        '-XX:+UseJVMCICompiler', '-XX:+UseJVMCINativeLibrary',
                        '-XX:+PrintFlagsFinal',
                        '-Dlibgraal.CrashAt=length,hashCode',
                        '-Dlibgraal.CrashAtIsFatal=true'
                    ]
                    cmd = ["dacapo:avrora", "--tracker=none", "--"
                           ] + vmargs + ["--", "--preserve"]
                    out = mx.OutputCapture()
                    exitcode, bench_suite, _ = mx_benchmark.gate_mx_benchmark(
                        cmd, nonZeroIsFatal=False, out=out, err=out)
                    if exitcode == 0:
                        if 'CrashAtIsFatal: no fatalError function pointer installed' in out.data:
                            # The VM under test does not configure fatal error
                            # handling in libgraal to route back through the VM,
                            # so a zero exit code is expected here.
                            pass
                        else:
                            mx.abort(
                                'Expected the following benchmark to result in a non-zero exit code: '
                                + ' '.join(cmd))
                    else:
                        if len(bench_suite.scratchDirs()) == 0:
                            mx.abort(
                                "No scratch dir found despite error being expected!"
                            )
                        latest_scratch_dir = bench_suite.scratchDirs()[-1]
                        seen_libjvmci_log = False
                        hs_errs = glob.glob(
                            join(latest_scratch_dir, 'hs_err_pid*.log'))
                        if not hs_errs:
                            mx.abort(
                                'Expected a file starting with "hs_err_pid" in test directory. Entries found='
                                + str(listdir(latest_scratch_dir)))

                        for hs_err in hs_errs:
                            mx.log("Verifying content of {}".format(
                                join(latest_scratch_dir, hs_err)))
                            with open(join(latest_scratch_dir, hs_err)) as fp:
                                contents = fp.read()
                            if 'libjvmci' in hs_err:
                                seen_libjvmci_log = True
                                if 'Fatal error: Forced crash' not in contents:
                                    mx.abort(
                                        'Expected "Fatal error: Forced crash" to be in contents of '
                                        + hs_err + ':' + linesep + contents)
                            else:
                                if 'Fatal error in JVMCI' not in contents:
                                    mx.abort(
                                        'Expected "Fatal error in JVMCI" to be in contents of '
                                        + hs_err + ':' + linesep + contents)

                        if 'JVMCINativeLibraryErrorFile' in out.data and not seen_libjvmci_log:
                            mx.abort(
                                'Expected a file matching "hs_err_pid*_libjvmci.log" in test directory. Entries found='
                                + str(listdir(latest_scratch_dir)))

                    # Only clean up scratch dir on success
                    for scratch_dir in bench_suite.scratchDirs():
                        mx.log(
                            "Cleaning up scratch dir after gate task completion: {}"
                            .format(scratch_dir))
                        mx.rmtree(scratch_dir)

            with Task('LibGraal Compiler:CTW',
                      tasks,
                      tags=[VmGateTasks.libgraal]) as t:
                if t:
                    mx_compiler.ctw([
                        '-DCompileTheWorld.Config=Inline=false CompilationFailureAction=ExitVM',
                        '-esa',
                        '-XX:+EnableJVMCI',
                        '-DCompileTheWorld.MultiThreaded=true',
                        '-Dgraal.InlineDuringParsing=false',
                        '-Dgraal.TrackNodeSourcePosition=true',
                        '-DCompileTheWorld.Verbose=false',
                        '-DCompileTheWorld.HugeMethodLimit=4000',
                        '-DCompileTheWorld.MaxCompiles=150000',
                        '-XX:ReservedCodeCacheSize=300m',
                    ], extra_vm_arguments)

            mx_compiler.compiler_gate_benchmark_runner(
                tasks, extra_vm_arguments, prefix='LibGraal Compiler:')

            with Task('LibGraal Truffle:unittest',
                      tasks,
                      tags=[VmGateTasks.libgraal]) as t:
                if t:

                    def _unittest_config_participant(config):
                        vmArgs, mainClass, mainClassArgs = config

                        def is_truffle_fallback(arg):
                            fallback_args = [
                                "-Dtruffle.TruffleRuntime=com.oracle.truffle.api.impl.DefaultTruffleRuntime",
                                "-Dgraalvm.ForcePolyglotInvalid=true"
                            ]
                            return arg in fallback_args

                        newVmArgs = [
                            arg for arg in vmArgs
                            if not is_truffle_fallback(arg)
                        ]
                        return (newVmArgs, mainClass, mainClassArgs)

                    mx_unittest.add_config_participant(
                        _unittest_config_participant)
                    excluded_tests = environ.get("TEST_LIBGRAAL_EXCLUDE")
                    if excluded_tests:
                        with NamedTemporaryFile(prefix='blacklist.',
                                                mode='w',
                                                delete=False) as fp:
                            fp.file.writelines(
                                [l + '\n' for l in excluded_tests.split()])
                            unittest_args = ["--blacklist", fp.name]
                    else:
                        unittest_args = []
                    unittest_args = unittest_args + [
                        "--enable-timing", "--verbose"
                    ]
                    compiler_log_file = "graal-compiler.log"
                    mx_unittest.unittest(unittest_args + extra_vm_arguments + [
                        "-Dpolyglot.engine.AllowExperimentalOptions=true",
                        "-Dpolyglot.engine.CompileImmediately=true",
                        "-Dpolyglot.engine.BackgroundCompilation=false",
                        "-Dpolyglot.engine.TraceCompilation=true",
                        "-Dpolyglot.log.file={0}".format(compiler_log_file),
                        "-Dgraalvm.locatorDisabled=true", "truffle"
                    ])
                    if exists(compiler_log_file):
                        remove(compiler_log_file)
    else:
        mx.warn("Skipping libgraal tests: component not enabled")

    gate_substratevm(tasks)
    gate_sulong(tasks)
    gate_python(tasks)
    gate_svm_sl_tck(tasks)
    gate_svm_truffle_tck_js(tasks)
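
For context, a gate runner with this (args, tasks) signature is typically registered with mx's gate machinery. The sketch below assumes the standard mx_gate.add_gate_runner hook and an illustrative _suite variable; whether this exact registration is used for gate_body is an assumption.

import mx_gate

# Assumption: the enclosing suite object is available as _suite, and gate_body
# defined above is hooked in as that suite's gate runner.
mx_gate.add_gate_runner(_suite, gate_body)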