Example #1
0
    def test(self, vm, cwd=None, extraVmOpts=None, vmbuild=None):
        """
        Run this program as a sanity test.

        Runs the VM with this program's options and command, parses the
        combined stdout/stderr for a HotSpot crash log, the configured
        success patterns and failure patterns, and returns True only when
        the process exited with 0 and a success pattern matched. VMs in
        ``self.ignoredVMs`` are skipped and treated as passing.
        """
        if vm in self.ignoredVMs:
            return True
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()
        # Matches the absolute path of a HotSpot crash log (hs_err_pid<N>.log),
        # on both Windows ("C:...") and Unix ("/...") style paths.
        jvmError = re.compile(
            r"(?P<jvmerror>([A-Z]:|/).*[/\\]hs_err_pid[0-9]+\.log)")
        parser.addMatcher(ValuesMatcher(jvmError, {'jvmError': '<jvmerror>'}))

        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed': '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed': '1'}))

        # Tee the VM output so it is both displayed and captured for parsing.
        tee = Tee()
        retcode = mx_graal_core.run_vm(
            self.vmOpts + _noneAsEmptyList(extraVmOpts) + self.cmd,
            vm,
            nonZeroIsFatal=False,
            out=tee.eat,
            err=subprocess.STDOUT,
            cwd=cwd,
            vmbuild=vmbuild)
        output = tee.output.getvalue()
        valueMaps = parser.parse(output)

        # No matcher fired at all: neither pass, fail nor crash was detected.
        if not valueMaps:
            return False

        # Merge all matcher results into one record, aborting if two matchers
        # report conflicting values for the same key.
        record = {}
        for valueMap in valueMaps:
            for key, value in valueMap.items():
                if key in record and record[key] != value:
                    mx.abort(
                        'Inconsistent values returned by test matchers : ' +
                        str(valueMaps))
                record[key] = value

        jvmErrorFile = record.get('jvmError')
        if jvmErrorFile:
            # The VM crashed: dump the crash log, then remove it.
            mx.log('/!\\JVM Error : dumping error log...')
            with open(jvmErrorFile, 'rb') as fp:
                mx.log(fp.read())
            os.unlink(jvmErrorFile)
            return False

        if record.get('failed') == '1':
            return False

        return retcode == 0 and record.get('passed') == '1'
Example #2
0
def deoptalot(args):
    """bootstrap a VM with DeoptimizeALot and VerifyOops on

    If the first argument is a number, the process will be repeated
    this number of times. All other arguments are passed to the VM."""
    count = 1
    # An optional leading numeric argument sets the repetition count and is
    # consumed so it is not passed on to the VM.
    if args and args[0].isdigit():
        count = int(args[0])
        del args[0]

    for _ in range(count):
        # Abort on the first iteration whose VM exits with a non-zero code.
        if mx_graal_core.run_vm(['-XX:-TieredCompilation', '-XX:+DeoptimizeALot', '-XX:+VerifyOops'] + args + ['-version']) != 0:
            mx.abort("Failed")
Example #3
0
def deoptalot(args):
    """bootstrap a VM with DeoptimizeALot and VerifyOops on

    If the first argument is a number, the process will be repeated
    this number of times. All other arguments are passed to the VM."""
    count = 1
    # An optional leading numeric argument sets the repetition count and is
    # consumed so it is not passed on to the VM.
    if args and args[0].isdigit():
        count = int(args[0])
        del args[0]

    for _ in range(count):
        # Abort on the first iteration whose VM exits with a non-zero code.
        if mx_graal_core.run_vm([
                '-XX:-TieredCompilation', '-XX:+DeoptimizeALot',
                '-XX:+VerifyOops'
        ] + args + ['-version']) != 0:
            mx.abort("Failed")
Example #4
0
    def test(self, vm, cwd=None, extraVmOpts=None, vmbuild=None):
        """
        Run this program as a sanity test.

        Runs the VM with this program's options and command, parses the
        combined stdout/stderr for a HotSpot crash log, the configured
        success patterns and failure patterns, and returns True only when
        the process exited with 0 and a success pattern matched. VMs in
        ``self.ignoredVMs`` are skipped and treated as passing.
        """
        if vm in self.ignoredVMs:
            return True

        self.lazyInit()
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()
        # Matches the absolute path of a HotSpot crash log (hs_err_pid<N>.log),
        # on both Windows ("C:...") and Unix ("/...") style paths.
        jvmError = re.compile(r"(?P<jvmerror>([A-Z]:|/).*[/\\]hs_err_pid[0-9]+\.log)")
        parser.addMatcher(ValuesMatcher(jvmError, {'jvmError' : '<jvmerror>'}))

        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed' : '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed' : '1'}))

        # Tee the VM output so it is both displayed and captured for parsing.
        tee = Tee()
        retcode = mx_graal_core.run_vm(self.vmOpts + _noneAsEmptyList(extraVmOpts) + self.cmd, vm, nonZeroIsFatal=False, out=tee.eat, err=subprocess.STDOUT, cwd=cwd, vmbuild=vmbuild)
        output = tee.output.getvalue()
        valueMaps = parser.parse(output)

        # No matcher fired at all: neither pass, fail nor crash was detected.
        if not valueMaps:
            return False

        # Merge all matcher results into one record, aborting if two matchers
        # report conflicting values for the same key.
        record = {}
        for valueMap in valueMaps:
            for key, value in valueMap.items():
                if key in record and record[key] != value:
                    mx.abort('Inconsistent values returned by test matchers : ' + str(valueMaps))
                record[key] = value

        jvmErrorFile = record.get('jvmError')
        if jvmErrorFile:
            # The VM crashed: dump the crash log, then remove it.
            mx.log('/!\\JVM Error : dumping error log...')
            with open(jvmErrorFile, 'rb') as fp:
                mx.log(fp.read())
            os.unlink(jvmErrorFile)
            return False

        if record.get('failed') == '1':
            return False

        return retcode == 0 and record.get('passed') == '1'
Example #5
0
    def bench(self, vm, cwd=None, extraVmOpts=None, vmbuild=None):
        """
        Run this program as a benchmark.

        Returns a dict mapping group names to {name: score} dicts extracted
        from the benchmark output. VMs in ``self.ignoredVMs`` return an empty
        dict. Aborts if the VM exits non-zero, a failure pattern matches, or
        no success pattern matches.
        """
        if vm in self.ignoredVMs:
            return {}
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()

        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed' : '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed' : '1'}))
        for scoreMatcher in self.scoreMatchers:
            parser.addMatcher(scoreMatcher)

        if self.benchmarkCompilationRate:
            # Compilation-rate metrics are reported differently per VM flavor.
            if vm == 'jvmci':
                bps = re.compile(r"ParsedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                ibps = re.compile(r"InlinedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                parser.addMatcher(ValuesMatcher(bps, {'group' : 'ParsedBytecodesPerSecond', 'name' : self.name, 'score' : '<rate>'}))
                parser.addMatcher(ValuesMatcher(ibps, {'group' : 'InlinedBytecodesPerSecond', 'name' : self.name, 'score' : '<rate>'}))
            else:
                ibps = re.compile(r"(?P<compiler>[\w]+) compilation speed: +(?P<rate>[0-9]+) bytes/s {standard")
                parser.addMatcher(ValuesMatcher(ibps, {'group' : 'InlinedBytecodesPerSecond', 'name' : '<compiler>:' + self.name, 'score' : '<rate>'}))

        startDelim = 'START: ' + self.name
        endDelim = 'END: ' + self.name

        outputfile = os.environ.get('BENCH_OUTPUT')
        if outputfile:
            # Used only to debug output parsing
            with open(outputfile) as fp:
                output = fp.read()
                start = output.find(startDelim)
                end = output.find(endDelim, start)
                # NOTE(review): this only bails out when BOTH delimiters are
                # missing; a missing endDelim alone yields end == -1 and the
                # slice below drops the last character — confirm intent.
                if start == -1 and end == -1:
                    return {}
                output = output[start + len(startDelim + os.linesep): end]
                mx.log(startDelim)
                mx.log(output)
                mx.log(endDelim)
        else:
            # Tee the VM output so it is both displayed and captured.
            tee = Tee()
            mx.log(startDelim)
            if mx_graal_core.run_vm(self.vmOpts + _noneAsEmptyList(extraVmOpts) + self.cmd, vm, nonZeroIsFatal=False, out=tee.eat, err=subprocess.STDOUT, cwd=cwd, vmbuild=vmbuild) != 0:
                mx.abort("Benchmark failed (non-zero retcode)")
            mx.log(endDelim)
            output = tee.output.getvalue()

        groups = {}
        passed = False
        for valueMap in parser.parse(output):
            # Every match must either be a full score record or a pass/fail flag.
            assert ('name' in valueMap and 'score' in valueMap and 'group' in valueMap) or 'passed' in valueMap or 'failed' in valueMap, valueMap
            if valueMap.get('failed') == '1':
                mx.abort("Benchmark failed")
            if valueMap.get('passed') == '1':
                passed = True
            groupName = valueMap.get('group')
            if groupName:
                group = groups.setdefault(groupName, {})
                name = valueMap.get('name')
                score = valueMap.get('score')
                if name and score:
                    group[name] = score

        if not passed:
            mx.abort("Benchmark failed (not passed)")

        return groups
Example #6
0
    def bench(self, vm, cwd=None, extraVmOpts=None, vmbuild=None):
        """
        Run this program as a benchmark.

        Returns a dict mapping group names to {name: score} dicts extracted
        from the benchmark output. VMs in ``self.ignoredVMs`` return an empty
        dict. Aborts if the VM exits non-zero, a failure pattern matches, or
        no success pattern matches.
        """
        if vm in self.ignoredVMs:
            return {}
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()

        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed': '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed': '1'}))
        for scoreMatcher in self.scoreMatchers:
            parser.addMatcher(scoreMatcher)

        if self.benchmarkCompilationRate:
            # Compilation-rate metrics are reported differently per VM flavor.
            if vm == 'jvmci':
                bps = re.compile(
                    r"ParsedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                ibps = re.compile(
                    r"InlinedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                parser.addMatcher(
                    ValuesMatcher(
                        bps, {
                            'group': 'ParsedBytecodesPerSecond',
                            'name': self.name,
                            'score': '<rate>'
                        }))
                parser.addMatcher(
                    ValuesMatcher(
                        ibps, {
                            'group': 'InlinedBytecodesPerSecond',
                            'name': self.name,
                            'score': '<rate>'
                        }))
            else:
                ibps = re.compile(
                    r"(?P<compiler>[\w]+) compilation speed: +(?P<rate>[0-9]+) bytes/s {standard"
                )
                parser.addMatcher(
                    ValuesMatcher(
                        ibps, {
                            'group': 'InlinedBytecodesPerSecond',
                            'name': '<compiler>:' + self.name,
                            'score': '<rate>'
                        }))

        startDelim = 'START: ' + self.name
        endDelim = 'END: ' + self.name

        outputfile = os.environ.get('BENCH_OUTPUT')
        if outputfile:
            # Used only to debug output parsing
            with open(outputfile) as fp:
                output = fp.read()
                start = output.find(startDelim)
                end = output.find(endDelim, start)
                # NOTE(review): this only bails out when BOTH delimiters are
                # missing; a missing endDelim alone yields end == -1 and the
                # slice below drops the last character — confirm intent.
                if start == -1 and end == -1:
                    return {}
                output = output[start + len(startDelim + os.linesep):end]
                mx.log(startDelim)
                mx.log(output)
                mx.log(endDelim)
        else:
            # Tee the VM output so it is both displayed and captured.
            tee = Tee()
            mx.log(startDelim)
            if mx_graal_core.run_vm(
                    self.vmOpts + _noneAsEmptyList(extraVmOpts) + self.cmd,
                    vm,
                    nonZeroIsFatal=False,
                    out=tee.eat,
                    err=subprocess.STDOUT,
                    cwd=cwd,
                    vmbuild=vmbuild) != 0:
                mx.abort("Benchmark failed (non-zero retcode)")
            mx.log(endDelim)
            output = tee.output.getvalue()

        groups = {}
        passed = False
        for valueMap in parser.parse(output):
            # Every match must either be a full score record or a pass/fail flag.
            assert ('name' in valueMap and 'score' in valueMap
                    and 'group' in valueMap) or 'passed' in valueMap \
                    or 'failed' in valueMap, valueMap
            if valueMap.get('failed') == '1':
                mx.abort("Benchmark failed")
            if valueMap.get('passed') == '1':
                passed = True
            groupName = valueMap.get('group')
            if groupName:
                group = groups.setdefault(groupName, {})
                name = valueMap.get('name')
                score = valueMap.get('score')
                if name and score:
                    group[name] = score

        if not passed:
            mx.abort("Benchmark failed (not passed)")

        return groups