Example #1
    @staticmethod
    def parse(inputs, recursive=False, ignore_errors=False):
        """Parse benchmark log files (*.log).

        Args:
            inputs: Path specifiers (files or directories) to search for log files.
            recursive (bool): If True, parse directories found in `inputs` recursively.
            ignore_errors (bool): If True, ignore errors associated with parsing parameter values.

        Returns:
            A BenchData instance with one parameter dictionary per parsed log file.
        """
        inputs = inputs if isinstance(inputs, list) else [inputs]
        log_files = set()
        for file_path in inputs:
            if os.path.isdir(file_path):
                # Gather *.log files from this directory (recursively if requested).
                log_files.update(IOUtils.gather_files([file_path], "*.log", recursive))
            elif file_path.endswith('.log'):
                log_files.add(file_path)
        log_files = list(log_files)
        benchmarks = []
        for log_file in log_files:
            parameters = {}
            with OpenFile(log_file, 'r') as logfile:
                # 'must_match' is set to False: not every line in a log file
                # has to match the key-value pattern.
                DictUtils.add(
                    parameters,
                    logfile,
                    pattern=r'[ \t]*__(.+?(?=__[ \t]*[=]))__[ \t]*=(.+)',
                    must_match=False,
                    ignore_errors=ignore_errors
                )
            benchmarks.append(parameters)
        return BenchData(benchmarks, create_copy=False)
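For reference, here is a minimal sketch of what the key-value pattern passed to DictUtils.add() extracts from a single log line. The parameter name and value are purely illustrative, not taken from a real log:

import re

# Same pattern as above: parameter names are wrapped in double underscores,
# e.g. a line of the form  __exp.framework__ = "tensorflow"
KEY_VALUE = re.compile(r'[ \t]*__(.+?(?=__[ \t]*[=]))__[ \t]*=(.+)')

match = KEY_VALUE.match('__exp.framework__ = "tensorflow"')
if match:
    key, value = match.group(1), match.group(2).strip()
    print(key, value)   # exp.framework "tensorflow"

Assuming parse is exposed as a static factory on BenchData, a call such as BenchData.parse('/path/to/logs', recursive=True) would return a BenchData instance holding one such parameter dictionary per log file.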
Example #2
import gzip
import json
import math
import sys

# IOUtils, LogParser and parse_args are assumed to be provided by the surrounding project.


def main():
    """Parse log files and dump results to stdout or to output file(s)."""
    opts = parse_args()

    files = IOUtils.gather_files(opts['inputs'], "*.log", opts['recursive'])
    succeeded, failed = LogParser.parse_log_files(files, opts)

    def _dump_data(file_name, opts, data):
        # Open in text mode ('wt' for gzip) so json.dump can write str objects.
        with (gzip.open(file_name, 'wt') if opts['_gz'] else open(file_name, 'w')) as file_obj:
            json.dump({'data': data}, file_obj, indent=4)

    if opts['output_file'] is None:
        json.dump(succeeded, sys.stdout, indent=4, sort_keys=True)
        print ("")
    else:
        IOUtils.mkdirf(opts['output_file'])
        output_files = []
        if failed:
            # Benchmarks that failed to parse go into a separate file.
            _dump_data(opts['_failed_file'], opts, failed)
            output_files.append(opts['_failed_file'])

        num_benchmarks = len(succeeded)
        if opts['num_output_files'] is not None:
            # Translate the requested number of output files into a per-file benchmark count.
            opts['benchmarks_per_file'] = int(math.ceil(float(num_benchmarks) / opts['num_output_files']))

        if opts['benchmarks_per_file'] is not None:
            # Write successfully parsed benchmarks in chunks, one output file per chunk.
            file_index = 0
            while True:
                start_index = file_index * opts['benchmarks_per_file']
                end_index = min(start_index + opts['benchmarks_per_file'], num_benchmarks)
                file_name = IOUtils.get_non_existing_file(
                    "%s_%d.%s" % (opts['_output_file_without_ext'], file_index, opts['_ext'])
                )
                _dump_data(
                    file_name,
                    opts,
                    succeeded[start_index:end_index]
                )
                output_files.append(file_name)
                if end_index >= num_benchmarks:
                    break
                file_index += 1
        else:
            _dump_data(opts['output_file'], opts, succeeded)
            output_files.append(opts['output_file'])
        print("Log parser summary.")
        print("Following files have been created:")
        json.dump(output_files, sys.stdout, indent=4, sort_keys=True)
        print ("")
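For orientation, main() relies on parse_args() returning an opts dictionary containing at least the keys referenced above. A hypothetical example follows; all values are illustrative, and the underscore-prefixed entries look like settings derived by parse_args rather than raw command-line options:

opts = {
    'inputs': ['/path/to/logs'],               # path specifiers searched for *.log files
    'recursive': True,                         # descend into sub-directories
    'output_file': 'benchmarks.json',          # None means "write JSON to stdout"
    'num_output_files': None,                  # split the output across N files if set
    'benchmarks_per_file': None,               # or cap the number of benchmarks per file
    '_gz': False,                              # gzip-compress the output files
    '_ext': 'json',                            # extension used when splitting the output
    '_output_file_without_ext': 'benchmarks',  # base name used when splitting the output
    '_failed_file': 'benchmarks.failed.json',  # where unparsable benchmarks are written
}

With output_file set to None the parsed benchmarks are printed to stdout; otherwise they are written to one or more (optionally gzip-compressed) JSON files, and any benchmarks that failed to parse are collected separately in the _failed_file.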