Example #1
def main(args):
    """
    Perform analysis on the given logfiles and store the results in a machine-
    readable format.
    """
    logger.setup_logger(args.verbose if 'verbose' in args else None)

    if len(args.logfile) != 1:
        LOG.warning("Only one log file can be processed right now!")
        sys.exit(1)

    args.output_path = os.path.abspath(args.output_path)
    if os.path.exists(args.output_path) and \
            not os.path.isdir(args.output_path):
        LOG.error("The given output path is not a directory: " +
                  args.output_path)
        sys.exit(1)

    if 'enable_all' in args:
        LOG.info("'--enable-all' was supplied for this analysis.")

    # Clear previous contents of the output directory in the following cases.
    ctu_dir = os.path.join(args.output_path, 'ctu-dir')
    if 'ctu_phases' in args and args.ctu_phases[0] and \
            os.path.isdir(ctu_dir):
        # Clear the CTU-dir if the user turned on the collection phase.
        shutil.rmtree(ctu_dir)
        LOG.debug("Previous CTU contents have been deleted.")

    if 'clean' in args and os.path.isdir(args.output_path):
        shutil.rmtree(args.output_path)
        LOG.info("Previous analysis results in '{0}' have been removed, "
                 "overwriting with current result.".format(args.output_path))

    if not os.path.exists(args.output_path):
        os.makedirs(args.output_path)

    LOG.debug("args: " + str(args))
    LOG.debug("Output will be stored to: '" + args.output_path + "'")

    # Parse the JSON CCDBs and retrieve the compile commands.
    actions = []
    for log_file in args.logfile:
        if not os.path.exists(log_file):
            LOG.error("The specified logfile '" + log_file + "' does not "
                      "exist!")
            continue

        parse_log_options = ParseLogOptions(args)
        actions += log_parser.parse_log(log_file, parse_log_options)
    if len(actions) == 0:
        LOG.info("None of the specified build log files contained "
                 "valid compilation commands. No analysis needed...")
        sys.exit(1)

    context = generic_package_context.get_context()
    metadata = {
        'action_num': len(actions),
        'command': sys.argv,
        'versions': {
            'codechecker':
            "{0} ({1})".format(context.package_git_tag,
                               context.package_git_hash)
        },
        'working_directory': os.getcwd(),
        'output_path': args.output_path,
        'result_source_files': {}
    }

    if 'name' in args:
        metadata['name'] = args.name

    # Update metadata dictionary with old values.
    metadata_file = os.path.join(args.output_path, 'metadata.json')
    if os.path.exists(metadata_file):
        with open(metadata_file, 'r') as data:
            metadata_prev = json.load(data)
            metadata['result_source_files'] = \
                metadata_prev['result_source_files']

    analyzer.perform_analysis(args, context, actions, metadata)

    LOG.debug("Analysis metadata write to '" + metadata_file + "'")
    with open(metadata_file, 'w') as metafile:
        json.dump(metadata, metafile)

    # WARNING: the 'CodeChecker store' command will later look for this
    # file in the output directory!
    compile_cmd_json = os.path.join(args.output_path, 'compile_cmd.json')
    try:
        source = os.path.abspath(args.logfile[0])
        target = os.path.abspath(compile_cmd_json)

        if source != target:
            shutil.copyfile(source, target)
    except shutil.Error:
        LOG.debug("Compilation database JSON file is the same.")
    except Exception:
        LOG.debug("Copying compilation database JSON file failed.")
Example #2
def main(args):
    """
    Perform analysis on the given logfiles and store the results in a machine-
    readable format.
    """

    context = generic_package_context.get_context()

    # Parse the JSON CCDBs and retrieve the compile commands.
    actions = []

    for log_file in args.logfile:
        if not os.path.exists(log_file):
            LOG.error("The specified logfile '" + log_file + "' does not "
                      "exist!")
            continue

        actions += log_parser.parse_log(log_file,
                                        args.add_compiler_defaults)

    if len(actions) == 0:
        LOG.info("None of the specified build log files contained "
                 "valid compilation commands. No analysis needed...")
        return

    if 'enable_all' in args:
        LOG.info("'--enable-all' was supplied for this analysis.")

    # Run the analysis.
    args.output_path = os.path.abspath(args.output_path)
    if os.path.isdir(args.output_path):
        shutil.rmtree(args.output_path)
        LOG.info("Previous analysis results in '{0}' have been "
                 "removed, overwriting with current result.".format(
                     args.output_path))
    os.makedirs(args.output_path)

    LOG.debug("Output will be stored to: '" + args.output_path + "'")

    metadata = {'action_num': len(actions),
                'command': sys.argv,
                'versions': {
                    'codechecker': "{0} ({1})".format(context.package_git_tag,
                                                      context.package_git_hash)
                },
                'working_directory': os.getcwd(),
                'output_path': args.output_path}

    if 'name' in args:
        metadata['name'] = args.name

    if 'skipfile' in args:
        # The skip information must be persisted: reports in headers can
        # only be filtered out later, by the report server behind the
        # 'store' command, if it has access to these patterns.
        with open(args.skipfile, 'r') as skipfile:
            metadata['skip_data'] = [line.strip() for line in skipfile]

    analyzer.perform_analysis(args, context, actions, metadata)

    metadata_path = os.path.join(args.output_path, 'metadata.json')
    LOG.debug("Analysis metadata write to '" + metadata_path + "'")
    with open(metadata_path, 'w') as metafile:
        json.dump(metadata, metafile)
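For orientation, the metadata.json written above has roughly the shape
below. This is a made-up illustration, not actual CodeChecker output;
'name' and 'skip_data' appear only when the corresponding arguments were
given.

import json

example_metadata = {
    'action_num': 42,
    'command': ['CodeChecker', 'analyze', 'compile_cmd.json',
                '-o', 'reports'],
    'versions': {'codechecker': 'v6.5.0 (abc1234)'},
    'working_directory': '/home/user/project',
    'output_path': '/home/user/project/reports',
    'skip_data': ['-*/3rd_party/*', '+*/src/*'],
}
print(json.dumps(example_metadata, indent=2))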
Example #3
def main(args):
    """
    Perform analysis on the given logfiles and store the results in a machine-
    readable format.
    """

    context = generic_package_context.get_context()

    # Parse the JSON CCDBs and retrieve the compile commands.
    actions = []

    if len(args.logfile) != 1:
        LOG.warning("Only one log file can be processed right now!")
        sys.exit(1)

    for log_file in args.logfile:
        if not os.path.exists(log_file):
            LOG.error("The specified logfile '" + log_file + "' does not "
                      "exist!")
            continue

        actions += log_parser.parse_log(log_file, args.add_compiler_defaults)

    if len(actions) == 0:
        LOG.info("None of the specified build log files contained "
                 "valid compilation commands. No analysis needed...")
        sys.exit(1)

    if 'enable_all' in args:
        LOG.info("'--enable-all' was supplied for this analysis.")

    # Run the analysis.
    args.output_path = os.path.abspath(args.output_path)
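    # Keep the output directory only for an analyze-only CTU run
    # (collect phase off, analyze phase on), which reuses previously
    # collected data; every other combination starts from scratch.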
    if 'ctu_phases' not in args or args.ctu_phases[0] or \
            (not args.ctu_phases[0] and not args.ctu_phases[1]):
        if os.path.isdir(args.output_path):
            shutil.rmtree(args.output_path)
            LOG.info("Previous analysis results in '{0}' have been "
                     "removed, overwriting with current result.".format(
                         args.output_path))
        os.makedirs(args.output_path)

    LOG.debug("Output will be stored to: '" + args.output_path + "'")

    metadata = {
        'action_num': len(actions),
        'command': sys.argv,
        'versions': {
            'codechecker':
            "{0} ({1})".format(context.package_git_tag,
                               context.package_git_hash)
        },
        'working_directory': os.getcwd(),
        'output_path': args.output_path
    }

    if 'name' in args:
        metadata['name'] = args.name

    if 'skipfile' in args:
        # The skip information must be persisted: reports in headers can
        # only be filtered out later, by the report server behind the
        # 'store' command, if it has access to these patterns.
        with open(args.skipfile, 'r') as skipfile:
            metadata['skip_data'] = [line.strip() for line in skipfile]

    analyzer.perform_analysis(args, context, actions, metadata)

    metadata_path = os.path.join(args.output_path, 'metadata.json')
    LOG.debug("Analysis metadata write to '" + metadata_path + "'")
    with open(metadata_path, 'w') as metafile:
        json.dump(metadata, metafile)

    # WARNING: the 'CodeChecker store' command will later look for this
    # file in the output directory!
    compile_cmd_json = os.path.join(args.output_path, 'compile_cmd.json')
    try:
        source = os.path.abspath(args.logfile[0])
        target = os.path.abspath(compile_cmd_json)
        shutil.copyfile(source, target)
    except shutil.Error:
        LOG.debug("Compilation database JSON file is the same.")
    except Exception:
        LOG.debug("Copying compilation database JSON file failed.")

    LOG.info("Analysis finished.")
    LOG.info("To view results in the terminal use the "
             "\"CodeChecker parse\" command.")
    LOG.info("To store results use the \"CodeChecker store\" command.")
    LOG.info("See --help and the user guide for further options about"
             " parsing and storing the reports.")
    LOG.info("----=================----")