Example #1
def get_megaguards_junit_status(verbose=False):
    is_ok = get_megaguards_home_dir(check_only=True)
    is_ok = is_ok and get_megaguards_build_dir(check_only=True)
    is_ok = is_ok and get_megaguards_polyhedral_ld(check_only=True)
    is_ok = is_ok and get_megaguards_test_dataset(check_only=True)
    if is_ok:
        n = 3
        for t in range(n):
            out = mx.OutputCapture()
            _out = out if not verbose else mx.TeeOutputCapture(out)
            out_err = mx.OutputCapture()
            _out_err = out_err if not verbose else mx.TeeOutputCapture(out_err)
            print_progress(
                "Performing MegaGuards (core) junit tests.. (note: run 'mx junit-mg' for complete MegaGuards junit tests)"
            )
            retcode = mx.run(['mx', 'junit-mg-core'],
                             out=_out,
                             err=_out_err,
                             nonZeroIsFatal=False)
            if retcode == 0:
                break
            else:
                print_progress("Test failed.. retry %d of %d" % (t + 1, n))
        if retcode == 0:
            print_ok('MegaGuards core junit tests')
        else:
            print_warn('MegaGuards core junit tests encountered some errors.')

        is_ok = is_ok and retcode == 0

    return is_ok
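Every example on this page uses the same mx output-handling idiom: mx.OutputCapture() is a callable that silently accumulates a command's output in its .data attribute, and wrapping it in mx.TeeOutputCapture() additionally echoes that output to the console (the wrapped capture stays reachable via .underlying). The minimal sketch below shows the idiom in isolation; the ['echo', 'hello'] command is only a placeholder, and the snippet assumes it runs somewhere the mx module is importable (e.g. inside an mx extension).

# Minimal sketch of the capture/tee idiom shared by the examples on this page.
# Assumes the mx module is importable; ['echo', 'hello'] is a placeholder command.
import mx

out = mx.OutputCapture()            # collects output in out.data, prints nothing
tee = mx.TeeOutputCapture(out)      # echoes to the console and still fills out.data
retcode = mx.run(['echo', 'hello'], out=tee, err=tee, nonZeroIsFatal=False)
mx.log("exit code %d, captured: %s" % (retcode, out.data.strip()))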
Example #2
def run_mg_internal(args,
                    verbose=False,
                    extraVmArgs=None,
                    env=None,
                    jdk=None,
                    **kwargs):
    vmArgs, mgArgs = mx.extract_VM_args(args)
    vmArgs = ['-cp', mx.classpath(["edu.uci.megaguards"])]
    vmArgs.append("edu.uci.megaguards.shell.MGMain")
    if not jdk:
        jdk = get_jdk()
    out = mx.OutputCapture()
    _out = out if not verbose else mx.TeeOutputCapture(out)
    out_err = mx.OutputCapture()
    _out_err = out_err if not verbose else mx.TeeOutputCapture(out_err)
    n = 3
    for t in range(n):
        retcode = mx.run_java(vmArgs + mgArgs,
                              out=_out,
                              err=_out_err,
                              jdk=jdk,
                              **kwargs)
        if retcode == 0:
            break
    return out.data
Example #3
def check_megaguards(device='GPU',
                     verbose=False,
                     cmd=['mx', 'python'],
                     testprogram='check_mg.py'):
    megaguards_opt = [
        '--mg-target=' + device.lower(), '--mg-log=eyxd',
        '--mg-target-threshold=1'
    ]
    check_python_program = [
        _suite.dir + os.sep + 'tests' + os.sep + testprogram
    ]
    n = 3
    for t in range(n):
        out = mx.OutputCapture()
        _out = out if not verbose else mx.TeeOutputCapture(out)
        out_err = mx.OutputCapture()
        _out_err = out if not verbose else mx.TeeOutputCapture(out_err)
        print_progress('Testing OpenCL device %s accessibility' % device)
        retcode = mx.run(cmd + check_python_program + megaguards_opt,
                         out=_out,
                         err=_out_err,
                         nonZeroIsFatal=False)
        if retcode == 0:
            break
        else:
            print("Execution failed.. retry %d of %d" % (t + 1, n))

    successRe = r"Execution Target:.+" + device + r""
    if not re.search(successRe, out.data, re.MULTILINE):
        print_error(opencl_err.format(device))
        return False
    else:
        print_ok("OpenCL device {0} has been detected!".format(device))
        return True
Example #4
    def runTester(self):
        config = self.loadConfiguration(self.benchmarkName())
        wrkDirectory = self.getLibraryDirectory()
        if mx.get_os() == "linux":
            distro = "linux"
        elif mx.get_os() == "darwin":
            distro = "macos"
        else:
            mx.abort("{0} not supported in {1}.".format(
                BaseWrkBenchmarkSuite.__name__, mx.get_os()))

        wrkPath = os.path.join(wrkDirectory, "wrk-{os}".format(os=distro))
        wrkFlags = self.setupWrkCmd(config)

        warmupDuration = None
        if self.inNativeMode():
            warmupDuration = config.get("warmup-duration-native-image", None)
        elif "warmup-duration" in config:
            warmupDuration = config["warmup-duration"]
        if warmupDuration:
            warmupWrkCmd = [wrkPath] + ["--duration",
                                        str(warmupDuration)] + wrkFlags
            mx.log("Warming up with Wrk: {0}".format(warmupWrkCmd))
            warmupOutput = mx.TeeOutputCapture(mx.OutputCapture())
            mx.run(warmupWrkCmd, out=warmupOutput, err=warmupOutput)

        if "duration" in config:
            wrkFlags = ["--duration", str(config["duration"])] + wrkFlags

        runWrkCmd = [wrkPath] + wrkFlags
        mx.log("Running Wrk: {0}".format(runWrkCmd))
        self.testerOutput = mx.TeeOutputCapture(mx.OutputCapture())
        mx.run(runWrkCmd, out=self.testerOutput, err=self.testerOutput)
Example #5
    def _run(self, *args, **kwargs):
        cmd = [self.binary, '-j', self.parallelism]
        if mx.get_opts().very_verbose:
            cmd += ['-v']
        cmd += args

        out = kwargs.get('out', mx.OutputCapture())
        err = kwargs.get('err', subprocess.STDOUT)
        if mx.get_opts().verbose:
            if callable(out) and '-n' not in args:
                out = mx.TeeOutputCapture(out)
            if callable(err):
                err = mx.TeeOutputCapture(err)

        try:
            rc = mx.run(cmd,
                        nonZeroIsFatal=False,
                        out=out,
                        err=err,
                        cwd=self.build_dir)
        except OSError as e:
            if e.errno != errno.EACCES:
                mx.abort('Error executing \'{}\': {}'.format(
                    ' '.join(cmd), str(e)))
            mx.logv(
                '{} is not executable. Trying to change permissions...'.format(
                    self.binary))
            os.chmod(self.binary, 0o755)
            self._run(*args, **kwargs)  # retry
        else:
            # Abort on a non-zero exit code: in verbose mode the output was already
            # echoed by the tee, so only the code is reported; otherwise the captured
            # output is shown instead.
            not rc or mx.abort(rc if mx.get_opts().verbose else out.data)  # pylint: disable=expression-not-assigned
Example #6
def python_svm(args):
    mx.run_mx(_SVM_ARGS + ["build"])
    out = mx.OutputCapture()
    mx.run_mx(_SVM_ARGS + ["graalvm-home"], out=mx.TeeOutputCapture(out))
    svm_image = os.path.join(out.data.strip(), "bin", "graalpython")
    mx.run([svm_image] + args)
    return svm_image
Example #7
 def ext_version(self):
     # Python 3 prints its "--version" banner on stdout while Python 2 printed it
     # on stderr, so both streams are captured and parsed below.
     out1 = mx.OutputCapture()
     out2 = mx.OutputCapture()
     mx.run([self.externalInterpreter(), "--version"],
            err=mx.TeeOutputCapture(out2),
            out=mx.TeeOutputCapture(out1))
     out1 = [] if not out1 or len(out1.data) <= 1 else out1.data.split("\n")
     out1 = [] if not out1 or len(out1) <= 1 else out1[0].split(" ")
     out2 = [] if not out2 or len(out2.data) <= 1 else out2.data.split("\n")
     out2 = [] if not out2 or len(out2) <= 1 else out2[0].split(" ")
     if len(out1) > 1:
         return out1[out1.index("Python") + 1].replace('-', '_')
     elif len(out2) > 1:
         return out2[out2.index("Python") + 1].replace('-', '_')
     else:
         return "unknown"
Example #8
def checkListingExclusions(exclusions, expected):
    parent_dir = os.path.normpath(file_dir + sep + "..")
    out = mx.TeeOutputCapture(mx.OutputCapture())
    env = os.environ.copy()
    env["MX_PRIMARY_SUITE_PATH"] = parent_dir
    mx_bin = os.path.normpath(parent_dir + sep + "mx")
    mx.run([
        mx_bin, 'benchmark', 'jmh-dist:MX_MICRO_BENCHMARKS', '--', '--', '-l'
    ] + exclusions,
           out=out,
           env=env,
           cwd=parent_dir)

    # Extract benchmark names from the output.
    benchmarks = []
    start = re.compile("Benchmarks:")
    end = re.compile(r"\d+ benchmark data points dumped")
    collecting = False
    for line in out.underlying.data.splitlines():
        if start.match(line):
            collecting = True
        elif end.match(line):
            collecting = False
            break
        elif collecting:
            # Collect unqualified name.
            benchmarks.append(line.split('.')[-1])

    if set(benchmarks) != set(expected):
        mx.abort(
            f"Filtering benchmarks with {exclusions} gave {benchmarks}, expected {expected}"
        )
Example #9
 def testPeakPerformance(self, warmup):
     jmeterDirectory = mx.library("APACHE_JMETER_" + self.jmeterVersion(), True).get_path(True)
     jmeterPath = os.path.join(jmeterDirectory, "apache-jmeter-" + self.jmeterVersion(), "bin/ApacheJMeter.jar")
     jmeterCmd = [mx.get_jdk().java, "-jar", jmeterPath, "-n", "-t", self.workloadConfigurationPath(), "-j", "/dev/stdout"] # pylint: disable=line-too-long
     mx.log("Running JMeter: {0}".format(jmeterCmd))
     output = mx.TeeOutputCapture(mx.OutputCapture())
     mx.run(jmeterCmd, out=output, err=output)
     self.peakOutput = output.underlying.data
Example #10
def _gate_python_benchmarks_tests(name, iterations):
    run_java = mx.run_java
    vmargs = ['-cp', mx.classpath(["com.oracle.graal.python"]), "com.oracle.graal.python.shell.GraalPythonMain", name, str(iterations)]
    success_pattern = re.compile(r"^(?P<benchmark>[a-zA-Z0-9.\-]+): (?P<score>[0-9]+(\.[0-9]+)?$)", re.MULTILINE)
    out = mx.OutputCapture()
    run_java(vmargs, out=mx.TeeOutputCapture(out), err=subprocess.STDOUT)
    if not success_pattern.search(out.data):
        mx.abort('Benchmark "' + name + '" doesn\'t match success pattern: ' + str(success_pattern))
Example #11
 def run(self, cwd, args):
     out = mx.TeeOutputCapture(mx.OutputCapture())
     args = self.post_process_command_line_args(args)
     mx.log("Running JVM with args: {0}".format(args))
     code = self.run_java(args, out=out, err=out, cwd=cwd, nonZeroIsFatal=False)
     out = out.underlying.data
     dims = self.dimensions(cwd, args, code, out)
     return code, out, dims
Example #12
 def run(self, cwd, args):
     _check_vm_args(self.name(), args)
     out = mx.OutputCapture()
     stdout_capture = mx.TeeOutputCapture(out)
     ret_code = mx.run([self.interpreter] + args,
                       out=stdout_capture,
                       err=stdout_capture)
     return ret_code, out.data
Example #13
 def run_launcher(self, cmd, args, cwd):
     """Run the 'cmd' command in the 'bin' directory."""
     out = mx.TeeOutputCapture(mx.OutputCapture())
     args = self.post_process_launcher_command_line_args(args)
     mx.log("Running '{}' on '{}' with args: '{}'".format(cmd, self.name(), args))
     code = mx.run([os.path.join(mx_vm.graalvm_home(fatalIfMissing=True), 'bin', cmd)] + args, out=out, err=out, cwd=cwd, nonZeroIsFatal=False)
     out = out.underlying.data
     dims = self.dimensions(cwd, args, code, out)
     return code, out, dims
Example #14
    def runWrk2(self, wrkFlags):
        distro = self.getOS()
        wrkDirectory = mx.library('WRK2', True).get_path(True)
        wrkPath = os.path.join(wrkDirectory, "wrk-{os}".format(os=distro))

        runWrkCmd = [wrkPath] + wrkFlags
        mx.log("Running Wrk2: {0}".format(runWrkCmd))
        output = mx.TeeOutputCapture(mx.OutputCapture())
        mx.run(runWrkCmd, out=output, err=output)
        return output.underlying.data
Example #15
 def runTester(self, benchmarkName):
     jmeterDirectory = mx.library("APACHE_JMETER_" + self.jmeterVersion(),
                                  True).get_path(True)
     jmeterPath = os.path.join(jmeterDirectory,
                               "apache-jmeter-" + self.jmeterVersion(),
                               "bin/ApacheJMeter.jar")
     jmeterCmd = [mx.get_jdk().java, "-jar", jmeterPath, "-n", "-t", self.jmeterWorkloadPath(benchmarkName), "-j", "/dev/stdout"]  # pylint: disable=line-too-long
     mx.log("Running JMeter: {0}".format(jmeterCmd))
     self.testerOutput = mx.TeeOutputCapture(mx.OutputCapture())
     mx.run(jmeterCmd, out=self.testerOutput, err=subprocess.PIPE)
Example #16
def _squeak_graalvm_launcher(args):
    """Build and run a GraalVM graalsqueak launcher"""

    dy = ['--dynamicimports', '/vm']
    mx.run_mx(dy + ['--env', 'ce-graalsqueak', 'build'])
    out = mx.OutputCapture()
    mx.run_mx(dy + ["graalvm-home"], out=mx.TeeOutputCapture(out))
    launcher = os.path.join(out.data.strip(), "bin", "graalsqueak").split("\n")[-1].strip()
    mx.log(launcher)
    if args:
        mx.run([launcher] + args)
    return launcher
Example #17
 def run(self, cwd, args):
     args += self._options
     if hasattr(self.host_vm(), 'run_lang'):
         return self.host_vm().run_lang('node', args, cwd)
     else:
         out = mx.TeeOutputCapture(mx.OutputCapture())
         args = self.host_vm().post_process_command_line_args(args)
         mx.log("Running {} with args: {}".format(self.name(), args))
         code = mx_graal_nodejs.node(args, add_graal_vm_args=False, nonZeroIsFatal=False, out=out, err=out, cwd=cwd)
         out = out.underlying.data
         dims = self.host_vm().dimensions(cwd, args, code, out)
         return code, out, dims
Example #18
 def benchmark_callback(suite, commit):
     suite.vc.update_to_branch(suite.vc_dir, commit)
     mx.run_mx(['sforceimports'], suite=suite)
     mx.run_mx(['--env', 'ce', 'sforceimports'], suite=get_suite('/vm'))
     if args.enterprise and suite.name != 'vm-enterprise':
         checkout_args = [
             '--dynamicimports', '/vm-enterprise', 'checkout-downstream',
             'vm', 'vm-enterprise'
         ]
         if fetched_enterprise[0]:
             checkout_args.append('--no-fetch')
         mx.run_mx(checkout_args, out=mx.OutputCapture())
         # Make sure vm is imported before vm-enterprise
         get_suite('/vm')
         mx.run_mx(['--env', 'ee', 'sforceimports'],
                   suite=get_suite('/vm-enterprise'))
         fetched_enterprise[0] = True
     suite.vc.update_to_branch(suite.vc_dir, commit)
     mx.run_mx(['sforceimports'], suite=suite)
     debug_str = "debug: graalpython={} graal={}".format(
         get_commit(get_suite('graalpython')), get_commit(get_suite('/vm')))
     if args.enterprise:
         debug_str += " graal-enterprise={}".format(
             get_commit(get_suite('/vm-enterprise')))
     print(debug_str)
     build_command = shlex.split(args.build_command)
     if not args.no_clean:
         try:
             clean_command = build_command[:build_command.index('build')] + ['clean']
             retcode = mx.run(clean_command, nonZeroIsFatal=False)
             if retcode:
                 print("Warning: clean command failed")
         except ValueError:
             pass
     retcode = mx.run(build_command, nonZeroIsFatal=False)
     if retcode:
         raise RuntimeError(
             "Failed to execute the build command for {}".format(commit))
     output = mx.OutputCapture()
     retcode = mx.run(shlex.split(args.benchmark_command),
                      out=mx.TeeOutputCapture(output),
                      nonZeroIsFatal=False)
     if retcode:
         raise RuntimeError(
             "Failed to execute benchmark for {}".format(commit))
     match = re.search(
         r'{}.*duration: ([\d.]+)'.format(
             re.escape(args.benchmark_criterion)), output.data)
     if not match:
         raise RuntimeError("Failed to get result from the benchmark")
     return float(match.group(1))
Example #19
 def __init__(self):
     super(BaseMicroserviceBenchmarkSuite, self).__init__()
     self.testerOutput = mx.TeeOutputCapture(mx.OutputCapture())
     self.timeToFirstResponseOutput = ''
     self.bmSuiteArgs = None
     self.workloadPath = None
     self.parser = argparse.ArgumentParser()
     self.parser.add_argument("--workload-configuration",
                              type=str,
                              default=None,
                              help="Path to workload configuration.")
     self.register_command_mapper_hook("TimeToFirstResponse",
                                       timeToFirstResponse)
Example #20
    def runWrk2(self, wrkFlags):
        distro = self.getOS()
        arch = mx.get_arch()
        wrkDirectory = mx.library('WRK2_MULTIARCH', True).get_path(True)
        wrkPath = os.path.join(wrkDirectory, "wrk-{os}-{arch}".format(os=distro, arch=arch))

        if not os.path.exists(wrkPath):
            raise ValueError("Unsupported OS or arch. Binary doesn't exist: {}".format(wrkPath))

        runWrkCmd = [wrkPath] + wrkFlags
        mx.log("Running Wrk2: {0}".format(runWrkCmd))
        output = mx.TeeOutputCapture(mx.OutputCapture())
        mx.run(runWrkCmd, out=output, err=output)
        return output.underlying.data
Example #21
def _python_graalvm_launcher(args):
    dy = "/vm,/tools,/substratevm"
    if "sandboxed" in args:
        args.remove("sandboxed")
        dy += ",/sulong-managed"
    dy = ["--dynamicimports", dy]
    mx.run_mx(dy + ["build"])
    out = mx.OutputCapture()
    mx.run_mx(dy + ["graalvm-home"], out=mx.TeeOutputCapture(out))
    launcher = os.path.join(out.data.strip(), "bin", "graalpython").split("\n")[-1].strip()
    mx.log(launcher)
    if args:
        mx.run([launcher] + args)
    return launcher
Example #22
 def run_launcher(self, cmd, args, cwd):
     """Run the 'cmd' command in the 'bin' directory."""
     args = self.post_process_launcher_command_line_args(args)
     self.extract_vm_info(args)
     mx.log("Running '{}' on '{}' with args: '{}'".format(
         cmd, self.name(), " ".join(args)))
     out = mx.TeeOutputCapture(mx.OutputCapture())
     command = [os.path.join(self.home(), 'bin', cmd)] + args
     command = mx.apply_command_mapper_hooks(command,
                                             self.command_mapper_hooks)
     code = mx.run(command, out=out, err=out, cwd=cwd, nonZeroIsFatal=False)
     out = out.underlying.data
     dims = self.dimensions(cwd, args, code, out)
     return code, out, dims
Example #23
 def __enter__(self):
     if mx.get_opts().strip_jars and self.out is None and (self.err is None or self.err == subprocess.STDOUT):
         delims = re.compile('[' + os.pathsep + '=]')
         for a in self.args:
             for e in delims.split(a):
                 candidate = e + '.map'
                 if exists(candidate):
                     if self.mapFiles is None:
                         self.mapFiles = set()
                     self.mapFiles.add(candidate)
         self.capture = mx.OutputCapture()
         self.out = mx.TeeOutputCapture(self.capture)
         self.err = self.out
     return self
Example #24
 def benchmark_callback(suite, commit, bench_command=args.benchmark_command):
     checkout_and_build_suite(suite, commit)
     output = mx.OutputCapture()
     retcode = mx.run(shlex.split(bench_command), out=mx.TeeOutputCapture(output), nonZeroIsFatal=False)
     if retcode:
         if args.benchmark_criterion == 'WORKS':
             return sys.maxsize
         else:
             raise RuntimeError("Failed to execute benchmark for {}".format(commit))
     elif args.benchmark_criterion == 'WORKS':
         return 0
     match = re.search(r'{}.*duration: ([\d.]+)'.format(re.escape(args.benchmark_criterion)), output.data)
     if not match:
         raise RuntimeError("Failed to get result from the benchmark")
     return float(match.group(1))
Example #25
def _gate_python_benchmarks_tests(name, iterations, extra_vmargs=[]):
    vmargs = extra_vmargs + ['-Xms2g', '-Xmx2g']
    run_java = mx.run_java

    if _mx_graal:
        vmargs += ['-Dgraal.TraceTruffleCompilation=true']
        run_java = mx_benchmark.get_java_vm('server', 'graal-core').run_java

    vmargs += ['-cp', mx.classpath(["edu.uci.python"]), "edu.uci.python.shell.Shell", name, str(iterations)]
    successRe = r"^(?P<benchmark>[a-zA-Z0-9\.\-]+): (?P<score>[0-9]+(\.[0-9]+)?$)"
    out = mx.OutputCapture()
    run_java(vmargs, out=mx.TeeOutputCapture(out), err=subprocess.STDOUT)

    if not re.search(successRe, out.data, re.MULTILINE):
        mx.abort('Benchmark "'+ name +'" doesn\'t match success pattern: ' + successRe)
Example #26
def get_megaguards_benchmark_suite(force=False,
                                   check_only=False,
                                   verbose=False):
    is_exist = os.path.exists(_suite.dir + megaguards_benchmarks_path +
                              os.sep + '.git')
    if check_only:
        return is_exist
    if not is_exist or force:
        print_progress('Importing MegaGuards benchmarks suite')
        out = mx.OutputCapture()
        _out = out if not verbose else mx.TeeOutputCapture(out)
        mx.run(['git', 'submodule', 'update'], out=_out, nonZeroIsFatal=True)
        is_exist = True

    return is_exist
Example #27
 def runAndReturnStdOut(self, benchmarks, bmSuiteArgs):
     args = self.createCommandLineArgs(benchmarks, bmSuiteArgs)
     cwd = self.workingDirectory(benchmarks, bmSuiteArgs)
     if args is None:
         return 0, "", {}
     out = mx.TeeOutputCapture(mx.OutputCapture())
     mx.log("Running " + self.externalInterpreter() +
            " with args: {0}".format(args))
     code = mx.run([self.externalInterpreter()] + args,
                   out=out,
                   err=out,
                   cwd=cwd,
                   nonZeroIsFatal=False)
     out = out.underlying.data
     dims = self.dimensions()
     return code, out, dims
Example #28
 def benchmark_callback(suite, commit):
     suite.vc.update_to_branch(suite.vc_dir, commit)
     mx.run_mx(['sforceimports'], suite=suite)
     mx.run_mx(['--env', 'ce', 'sforceimports'], suite=get_suite('/vm'))
     if args.enterprise and suite.name != 'vm-enterprise':
         checkout_args = [
             '--dynamicimports', '/vm-enterprise', 'checkout-downstream',
             'vm', 'vm-enterprise'
         ]
         if fetched_enterprise[0]:
             checkout_args.append('--no-fetch')
         mx.run_mx(checkout_args, out=mx.OutputCapture())
         # Make sure vm is imported before vm-enterprise
         get_suite('/vm')
         mx.run_mx(['--env', 'ee', 'sforceimports'],
                   suite=get_suite('/vm-enterprise'))
         fetched_enterprise[0] = True
     suite.vc.update_to_branch(suite.vc_dir, commit)
     mx.run_mx(['sforceimports'], suite=suite)
     debug_str = "debug: graalpython={} graal={}".format(
         get_commit(get_suite('graalpython')), get_commit(get_suite('/vm')))
     if args.enterprise:
         debug_str += " graal-enterprise={}".format(
             get_commit(get_suite('/vm-enterprise')))
     print(debug_str)
     env = os.environ.copy()
     env['MX_ALT_OUTPUT_ROOT'] = 'mxbuild-{}'.format(commit)
     retcode = mx.run(shlex.split(args.build_command),
                      env=env,
                      nonZeroIsFatal=False)
     if retcode:
         raise RuntimeError(
             "Failed to execute the build command for {}".format(commit))
     output = mx.OutputCapture()
     retcode = mx.run(shlex.split(args.benchmark_command),
                      env=env,
                      out=mx.TeeOutputCapture(output),
                      nonZeroIsFatal=False)
     if retcode:
         raise RuntimeError(
             "Failed to execute benchmark for {}".format(commit))
     match = re.search(
         r'{}.*duration: ([\d.]+)'.format(
             re.escape(args.benchmark_criterion)), output.data)
     if not match:
         raise RuntimeError("Failed to get result from the benchmark")
     return float(match.group(1))
Example #29
 def runJMeterInBackground(jmeterBenchmarkSuite, benchmarkName):
     if not JMeterBenchmarkSuite.findApplication(
             jmeterBenchmarkSuite.applicationPort()):
         mx.abort(
             "Failed to find server application in JMeterBenchmarkSuite")
     jmeterCmd = [mx.get_jdk().java, "-jar", jmeterBenchmarkSuite.jmeterPath(), "-n", "-t", jmeterBenchmarkSuite.workloadPath(benchmarkName), "-j", "/dev/stdout"]  # pylint: disable=line-too-long
     mx.log("Running JMeter: {0}".format(jmeterCmd))
     jmeterBenchmarkSuite.jmeterOutput = mx.TeeOutputCapture(
         mx.OutputCapture())
     mx.run(jmeterCmd,
            out=jmeterBenchmarkSuite.jmeterOutput,
            err=subprocess.PIPE)
     if not jmeterBenchmarkSuite.terminateApplication(
             jmeterBenchmarkSuite.applicationPort()):
         mx.abort(
             "Failed to terminate server application in JMeterBenchmarkSuite"
         )
Example #30
def run_embedded_native_python_test(args=None):
    """
    Test that embedding an engine where a context was initialized at native image
    build-time is enough to create multiple contexts from that engine without
    those contexts having access to the core files, due to caching in the shared
    engine.
    """
    with mx.TempDirCwd(os.getcwd()) as dirname:
        python_launcher = python_gvm()
        graalvm_javac = os.path.join(os.path.dirname(python_launcher), "javac")
        graalvm_native_image = os.path.join(os.path.dirname(python_launcher),
                                            "native-image")

        filename = os.path.join(dirname, "HelloWorld.java")
        with open(filename, "w") as f:
            f.write("""
            import org.graalvm.polyglot.*;

            public class HelloWorld {
                static final Engine engine = Engine.newBuilder().allowExperimentalOptions(true).option("log.python.level", "FINEST").build();
                static {
                   try (Context contextNull = Context.newBuilder("python").engine(engine).build()) {
                       contextNull.initialize("python");
                   }
                }

                public static void main(String[] args) {
                    try (Context context1 = Context.newBuilder("python").engine(engine).build()) {
                        context1.eval("python", "print(b'abc'.decode('ascii'))");
                        try (Context context2 = Context.newBuilder("python").engine(engine).build()) {
                            context2.eval("python", "print(b'xyz'.decode('ascii'))");
                        }
                    }
                }
            }
            """)
        out = mx.OutputCapture()
        mx.run([graalvm_javac, filename])
        mx.run([
            graalvm_native_image, "--initialize-at-build-time",
            "--language:python", "HelloWorld"
        ])
        mx.run(["./helloworld"], out=mx.TeeOutputCapture(out))
        assert "abc" in out.data
        assert "xyz" in out.data