Example #1
    def test(self, vm, cwd=None, extraVmOpts=None, vmbuild=None):
        """
        Run this program as a sanity test.
        """
        if vm in self.ignoredVMs:
            return True
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()
        jvmError = re.compile(
            r"(?P<jvmerror>([A-Z]:|/).*[/\\]hs_err_pid[0-9]+\.log)")
        parser.addMatcher(ValuesMatcher(jvmError, {'jvmError': '<jvmerror>'}))

        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed': '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed': '1'}))

        tee = Tee()
        retcode = mx_graal_core.run_vm(
            self.vmOpts + _noneAsEmptyList(extraVmOpts) + self.cmd,
            vm,
            nonZeroIsFatal=False,
            out=tee.eat,
            err=subprocess.STDOUT,
            cwd=cwd,
            vmbuild=vmbuild)
        output = tee.output.getvalue()
        valueMaps = parser.parse(output)

        if len(valueMaps) == 0:
            return False

        record = {}
        for valueMap in valueMaps:
            for key, value in valueMap.items():
                if key in record and record[key] != value:
                    mx.abort(
                        'Inconsistent values returned by test matchers: ' +
                        str(valueMaps))
                record[key] = value

        jvmErrorFile = record.get('jvmError')
        if jvmErrorFile:
            mx.log('/!\\ JVM Error: dumping error log...')
            with open(jvmErrorFile) as fp:
                mx.log(fp.read())
            os.unlink(jvmErrorFile)
            return False

        if record.get('failed') == '1':
            return False

        return retcode == 0 and record.get('passed') == '1'
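All six snippets lean on the same OutputParser/ValuesMatcher pattern: each matcher applies a regex to the VM output and emits one value map per match, with '<group>' placeholders replaced by the corresponding named capture group. The real classes come from mx's output-parsing module; the following is only a minimal sketch of that behavior, not the actual mx implementation:

import re

class ValuesMatcher(object):
    """Sketch: yields one dict per regex match, expanding '<name>'
    placeholders in the template with the named capture groups."""
    def __init__(self, regex, valuesTemplate):
        self.regex = regex
        self.valuesTemplate = valuesTemplate

    def parse(self, text):
        for match in self.regex.finditer(text):
            yield {key: re.sub(r'<(\w+)>',
                               lambda m: match.group(m.group(1)),
                               template)
                   for key, template in self.valuesTemplate.items()}

class OutputParser(object):
    """Sketch: runs every registered matcher over the output and
    concatenates their value maps."""
    def __init__(self):
        self.matchers = []

    def addMatcher(self, matcher):
        self.matchers.append(matcher)

    def parse(self, text):
        return [values for m in self.matchers for values in m.parse(text)]

Under this model, a crash line containing '/tmp/hs_err_pid1234.log' matched by the jvmError regex above yields {'jvmError': '/tmp/hs_err_pid1234.log'}, which is exactly what the record-merging loop consumes.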
Example #2
    def test(self, vm, cwd=None, extraVmOpts=None, vmbuild=None):
        """
        Run this program as a sanity test.
        """
        if vm in self.ignoredVMs:
            return True

        self.lazyInit()
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()
        jvmError = re.compile(r"(?P<jvmerror>([A-Z]:|/).*[/\\]hs_err_pid[0-9]+\.log)")
        parser.addMatcher(ValuesMatcher(jvmError, {'jvmError' : '<jvmerror>'}))

        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed' : '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed' : '1'}))

        tee = Tee()
        retcode = mx_graal_core.run_vm(self.vmOpts + _noneAsEmptyList(extraVmOpts) + self.cmd, vm, nonZeroIsFatal=False, out=tee.eat, err=subprocess.STDOUT, cwd=cwd, vmbuild=vmbuild)
        output = tee.output.getvalue()
        valueMaps = parser.parse(output)

        if len(valueMaps) == 0:
            return False

        record = {}
        for valueMap in valueMaps:
            for key, value in valueMap.items():
                if key in record and record[key] != value:
                    mx.abort('Inconsistent values returned by test matchers: ' + str(valueMaps))
                record[key] = value

        jvmErrorFile = record.get('jvmError')
        if jvmErrorFile:
            mx.log('/!\\ JVM Error: dumping error log...')
            with open(jvmErrorFile) as fp:
                mx.log(fp.read())
            os.unlink(jvmErrorFile)
            return False

        if record.get('failed') == '1':
            return False

        return retcode == 0 and record.get('passed') == '1'
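The Tee helper is not shown in any of these snippets. From its usage (tee.eat passed as the out callback, tee.output.getvalue() read back afterwards) a plausible stand-in looks like the sketch below; this is an assumption about its shape, not the original class:

import sys
try:
    from StringIO import StringIO  # Python 2
except ImportError:
    from io import StringIO        # Python 3

class Tee(object):
    """Assumed shape: echoes each chunk of subprocess output to the
    console while buffering a copy for later parsing."""
    def __init__(self):
        self.output = StringIO()

    def eat(self, line):
        self.output.write(line)  # buffered copy for OutputParser.parse
        sys.stdout.write(line)   # still visible live on the console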
Example #3
    def bench(self, vm, cwd=None, extraVmOpts=None, vmbuild=None):
        """
        Run this program as a benchmark.
        """
        if vm in self.ignoredVMs:
            return {}
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()
        
        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed' : '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed' : '1'}))
        for scoreMatcher in self.scoreMatchers:
            parser.addMatcher(scoreMatcher)

        if self.benchmarkCompilationRate:
            if vm == 'graal':
                bps = re.compile(r"ParsedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                ibps = re.compile(r"InlinedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                parser.addMatcher(ValuesMatcher(bps, {'group' : 'ParsedBytecodesPerSecond', 'name' : self.name, 'score' : '<rate>'}))
                parser.addMatcher(ValuesMatcher(ibps, {'group' : 'InlinedBytecodesPerSecond', 'name' : self.name, 'score' : '<rate>'}))
            else:
                ibps = re.compile(r"(?P<compiler>[\w]+) compilation speed: +(?P<rate>[0-9]+) bytes/s {standard")
                parser.addMatcher(ValuesMatcher(ibps, {'group' : 'InlinedBytecodesPerSecond', 'name' : '<compiler>:' + self.name, 'score' : '<rate>'}))
            
        startDelim = 'START: ' + self.name
        endDelim = 'END: ' + self.name
        
        outputfile = os.environ.get('BENCH_OUTPUT', None)
        if outputfile:
            # Used only to debug output parsing
            with open(outputfile) as fp:
                output = fp.read()
                start = output.find(startDelim)
                end = output.find(endDelim, start)
                if start == -1 or end == -1:
                    # bail out unless both delimiters were found
                    return {}
                output = output[start + len(startDelim + os.linesep): end]
                mx.log(startDelim)
                mx.log(output)
                mx.log(endDelim)
        else:
            tee = Tee()
            mx.log(startDelim)
            if commands.vm(self.vmOpts + (extraVmOpts or []) + self.cmd, vm, nonZeroIsFatal=False, out=tee.eat, err=subprocess.STDOUT, cwd=cwd, vmbuild=vmbuild) != 0:
                mx.abort("Benchmark failed (non-zero retcode)")
            mx.log(endDelim)
            output = tee.output.getvalue()

        groups = {}
        passed = False
        for valueMap in parser.parse(output):
            assert ('name' in valueMap and 'score' in valueMap and 'group' in valueMap) or 'passed' in valueMap or 'failed' in valueMap, valueMap
            if valueMap.get('failed') == '1':
                mx.abort("Benchmark failed")
            if valueMap.get('passed') == '1':
                passed = True
            groupName = valueMap.get('group')
            if groupName:
                group = groups.setdefault(groupName, {})
                name = valueMap.get('name')
                score = valueMap.get('score')
                if name and score:
                    group[name] = score
        
        if not passed:
            mx.abort("Benchmark failed (not passed)")
        
        return groups
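The BENCH_OUTPUT debug path replays a previously saved log instead of running the VM: it finds the START/END delimiters and keeps only the region between them. A standalone sketch of that slicing, with a made-up benchmark name and score line:

import os

name = 'bootstrap'  # hypothetical benchmark name
startDelim = 'START: ' + name
endDelim = 'END: ' + name
saved = os.linesep.join(['noise', startDelim, 'score: 42', endDelim, ''])

start = saved.find(startDelim)
end = saved.find(endDelim, start)
if start != -1 and end != -1:
    # keep only what the benchmark printed between the delimiters
    region = saved[start + len(startDelim + os.linesep):end]
    print(region)  # -> 'score: 42' plus a trailing newline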
Example #4
    def bench(self, vm, cwd=None, opts=None, vmbuild=None):
        """
        Run this program as a benchmark.
        """
        if vm in self.ignoredVMs:
            return {}
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()

        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed': '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed': '1'}))
        for scoreMatcher in self.scoreMatchers:
            parser.addMatcher(scoreMatcher)

        if self.benchmarkCompilationRate:
            if vm == 'graal':
                bps = re.compile(
                    r"ParsedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                ibps = re.compile(
                    r"InlinedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                parser.addMatcher(
                    ValuesMatcher(
                        bps, {
                            'group': 'ParsedBytecodesPerSecond',
                            'name': self.name,
                            'score': '<rate>'
                        }))
                parser.addMatcher(
                    ValuesMatcher(
                        ibps, {
                            'group': 'InlinedBytecodesPerSecond',
                            'name': self.name,
                            'score': '<rate>'
                        }))
            else:
                ibps = re.compile(
                    r"(?P<compiler>[\w]+) compilation speed: +(?P<rate>[0-9]+) bytes/s {standard"
                )
                parser.addMatcher(
                    ValuesMatcher(
                        ibps, {
                            'group': 'InlinedBytecodesPerSecond',
                            'name': '<compiler>:' + self.name,
                            'score': '<rate>'
                        }))

        startDelim = 'START: ' + self.name
        endDelim = 'END: ' + self.name

        outputfile = os.environ.get('BENCH_OUTPUT', None)
        if outputfile:
            # Used only to debug output parsing
            with open(outputfile) as fp:
                output = fp.read()
                start = output.find(startDelim)
                end = output.find(endDelim, start)
                if start == -1 or end == -1:
                    # bail out unless both delimiters were found
                    return {}
                output = output[start + len(startDelim + os.linesep):end]
                mx.log(startDelim)
                mx.log(output)
                mx.log(endDelim)
        else:
            tee = Tee()
            mx.log(startDelim)
            if commands.vm(self.vmOpts + (opts or []) + self.cmd,
                           vm,
                           nonZeroIsFatal=False,
                           out=tee.eat,
                           err=subprocess.STDOUT,
                           cwd=cwd,
                           vmbuild=vmbuild) != 0:
                mx.abort("Benchmark failed (non-zero retcode)")
            mx.log(endDelim)
            output = tee.output.getvalue()

        groups = {}
        passed = False
        for valueMap in parser.parse(output):
            assert (('name' in valueMap and 'score' in valueMap
                     and 'group' in valueMap) or 'passed' in valueMap
                    or 'failed' in valueMap), valueMap
            if valueMap.get('failed') == '1':
                mx.abort("Benchmark failed")
            if valueMap.get('passed') == '1':
                passed = True
            groupName = valueMap.get('group')
            if groupName:
                group = groups.setdefault(groupName, {})
                name = valueMap.get('name')
                score = valueMap.get('score')
                if name and score:
                    group[name] = score

        if not passed:
            mx.abort("Benchmark failed (not passed)")

        return groups
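On success, bench returns scores grouped by metric and then by score name, all as strings taken straight from the regex captures. An illustrative result for a compilation-rate run on the graal VM (the benchmark name and numbers are made up):

# Illustrative values only; the shape matches what the loop above builds.
result = {
    'ParsedBytecodesPerSecond':  {'MyBench': '52000'},
    'InlinedBytecodesPerSecond': {'MyBench': '48000'},
}
for group, scores in result.items():
    for name, score in scores.items():
        print('%s: %s = %s' % (group, name, score))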
Example #5
    def bench(self, vm, cwd=None, extraVmOpts=None, vmbuild=None):
        """
        Run this program as a benchmark.
        Copied from sanitycheck.Test to extend benchmarking for non-JVMs.
        """
        if vm in self.ignoredVMs:
            return {}
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()

        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed' : '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed' : '1'}))
        for scoreMatcher in self.scoreMatchers:
            parser.addMatcher(scoreMatcher)

        if self.benchmarkCompilationRate:
            if vm == 'graal':
                bps = re.compile(r"ParsedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                ibps = re.compile(r"InlinedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                parser.addMatcher(ValuesMatcher(bps, {'group' : 'ParsedBytecodesPerSecond', 'name' : self.name, 'score' : '<rate>'}))
                parser.addMatcher(ValuesMatcher(ibps, {'group' : 'InlinedBytecodesPerSecond', 'name' : self.name, 'score' : '<rate>'}))
            else:
                ibps = re.compile(r"(?P<compiler>[\w]+) compilation speed: +(?P<rate>[0-9]+) bytes/s {standard")
                parser.addMatcher(ValuesMatcher(ibps, {'group' : 'InlinedBytecodesPerSecond', 'name' : '<compiler>:' + self.name, 'score' : '<rate>'}))

        startDelim = 'START: ' + self.name
        endDelim = 'END: ' + self.name

        outputfile = os.environ.get('BENCH_OUTPUT', None)
        if outputfile:
            # Used only to debug output parsing
            with open(outputfile) as fp:
                output = fp.read()
                start = output.find(startDelim)
                end = output.find(endDelim, start)
                if start == -1 or end == -1:
                    # bail out unless both delimiters were found
                    return {}
                output = output[start + len(startDelim + os.linesep): end]
                mx.log(startDelim)
                mx.log(output)
                mx.log(endDelim)
        else:
            tee = Tee()
            mx.log(startDelim)
            # zippy
            result = -1
            if vm == 'cpython2':
                result = mx.run(['python'] + self.cmd[-2:], out=tee.eat)
            elif vm == 'cpython':
                result = mx.run(['python3'] + self.cmd[-2:], out=tee.eat)
            elif vm == 'jython':
                result = mx_graal.vm(['-jar', mx.library('JYTHON').path] + self.cmd[-2:], vm='original', out=tee.eat)
            elif vm == 'pypy':
                result = mx.run(['pypy'] + self.cmd[-2:], out=tee.eat)
            elif vm == 'pypy3':
                result = mx.run(['pypy3'] + self.cmd[-2:], out=tee.eat)
            else:
                result = mx_graal.vm(self.vmOpts + _noneAsEmptyList(extraVmOpts) + self.cmd, vm, nonZeroIsFatal=False, out=tee.eat, err=subprocess.STDOUT, cwd=cwd, vmbuild=vmbuild)

            if result != 0:
                mx.abort("Benchmark failed (non-zero retcode)")
            # wait for subprocess to finish
            time.sleep(.5)
            mx.log(endDelim)
            output = tee.output.getvalue()

        groups = {}
        passed = False
        for valueMap in parser.parse(output):
            assert ('name' in valueMap and 'score' in valueMap and 'group' in valueMap) or 'passed' in valueMap or 'failed' in valueMap, valueMap
            if valueMap.get('failed') == '1':
                mx.abort("Benchmark failed")
            if valueMap.get('passed') == '1':
                passed = True
            groupName = valueMap.get('group')
            if groupName:
                group = groups.setdefault(groupName, {})
                name = valueMap.get('name')
                score = valueMap.get('score')
                if name and score:
                    group[name] = score

        if not passed:
            mx.abort("Benchmark failed (not passed)")

        return groups
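The if/elif chain above is a dispatch from a vm name to an interpreter command, with self.cmd[-2:] assumed to hold the script and its argument. The same dispatch can be written as a table; this sketch swaps the mx.run wrapper for plain subprocess so it stands alone:

import subprocess

# Hypothetical table form of the interpreter dispatch above.
INTERPRETERS = {
    'cpython2': ['python'],
    'cpython':  ['python3'],
    'pypy':     ['pypy'],
    'pypy3':    ['pypy3'],
}

def run_non_jvm(vm, cmd):
    """Run cmd[-2:] (script plus argument, mirroring self.cmd[-2:])
    under the interpreter registered for vm; returns the exit code."""
    interpreter = INTERPRETERS.get(vm)
    if interpreter is None:
        raise ValueError('not a non-JVM interpreter: %s' % vm)
    return subprocess.call(interpreter + cmd[-2:])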
Example #6
    def bench(self, vm, cwd=None, extraVmOpts=None, vmbuild=None):
        """
        Run this program as a benchmark.
        Copied from sanitycheck.Test to extend benchmarking for non-JVMs.
        """
        if vm in self.ignoredVMs:
            return {}
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()

        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed': '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed': '1'}))
        for scoreMatcher in self.scoreMatchers:
            parser.addMatcher(scoreMatcher)

        if self.benchmarkCompilationRate:
            if vm == 'graal':
                bps = re.compile(
                    r"ParsedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                ibps = re.compile(
                    r"InlinedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                parser.addMatcher(
                    ValuesMatcher(
                        bps, {
                            'group': 'ParsedBytecodesPerSecond',
                            'name': self.name,
                            'score': '<rate>'
                        }))
                parser.addMatcher(
                    ValuesMatcher(
                        ibps, {
                            'group': 'InlinedBytecodesPerSecond',
                            'name': self.name,
                            'score': '<rate>'
                        }))
            else:
                ibps = re.compile(
                    r"(?P<compiler>[\w]+) compilation speed: +(?P<rate>[0-9]+) bytes/s {standard"
                )
                parser.addMatcher(
                    ValuesMatcher(
                        ibps, {
                            'group': 'InlinedBytecodesPerSecond',
                            'name': '<compiler>:' + self.name,
                            'score': '<rate>'
                        }))

        startDelim = 'START: ' + self.name
        endDelim = 'END: ' + self.name

        outputfile = os.environ.get('BENCH_OUTPUT', None)
        if outputfile:
            # Used only to debug output parsing
            with open(outputfile) as fp:
                output = fp.read()
                start = output.find(startDelim)
                end = output.find(endDelim, start)
                if start == -1 or end == -1:
                    # bail out unless both delimiters were found
                    return {}
                output = output[start + len(startDelim + os.linesep):end]
                mx.log(startDelim)
                mx.log(output)
                mx.log(endDelim)
        else:
            tee = Tee()
            mx.log(startDelim)
            # zippy
            result = -1
            if vm == 'cpython2':
                result = mx.run(['python'] + self.cmd[-2:], out=tee.eat)
            elif vm == 'cpython':
                result = mx.run(['python3'] + self.cmd[-2:], out=tee.eat)
            elif vm == 'jython':
                result = mx_graal.vm(
                    ['-jar', mx.library('JYTHON').path] + self.cmd[-2:],
                    vm='original',
                    out=tee.eat)
            elif vm == 'pypy':
                result = mx.run(['pypy'] + self.cmd[-2:], out=tee.eat)
            elif vm == 'pypy3':
                result = mx.run(['pypy3'] + self.cmd[-2:], out=tee.eat)
            else:
                result = mx_graal.vm(self.vmOpts +
                                     _noneAsEmptyList(extraVmOpts) + self.cmd,
                                     vm,
                                     nonZeroIsFatal=False,
                                     out=tee.eat,
                                     err=subprocess.STDOUT,
                                     cwd=cwd,
                                     vmbuild=vmbuild)

            if result != 0:
                mx.abort("Benchmark failed (non-zero retcode)")
            # wait for subprocess to finish
            time.sleep(.5)
            mx.log(endDelim)
            output = tee.output.getvalue()

        groups = {}
        passed = False
        for valueMap in parser.parse(output):
            assert (('name' in valueMap and 'score' in valueMap
                     and 'group' in valueMap) or 'passed' in valueMap
                    or 'failed' in valueMap), valueMap
            if valueMap.get('failed') == '1':
                mx.abort("Benchmark failed")
            if valueMap.get('passed') == '1':
                passed = True
            groupName = valueMap.get('group')
            if groupName:
                group = groups.setdefault(groupName, {})
                name = valueMap.get('name')
                score = valueMap.get('score')
                if name and score:
                    group[name] = score

        if not passed:
            mx.abort("Benchmark failed (not passed)")

        return groups