Example #1
def parse(f, context, metadata_dict, suppress_handler, steps):
    """
    Prints the results in the given file to the standard output in a human-
    readable format.

    Returns the report statistics collected by the result handler.
    """

    if not f.endswith(".plist"):
        LOG.info("Skipping input file '" + f + "' as it is not a plist.")
        return {}

    LOG.debug("Parsing input file '" + f + "'")

    buildaction = build_action.BuildAction()

    rh = analyzer_types.construct_parse_handler(buildaction, f,
                                                context.severity_map,
                                                suppress_handler, steps)

    # Set some variables of the result handler to use the saved file.
    rh.analyzer_returncode = 0
    rh.analyzer_result_file = f
    rh.analyzer_cmd = ""

    if 'result_source_files' in metadata_dict and \
            f in metadata_dict['result_source_files']:
        rh.analyzed_source_file = \
            metadata_dict['result_source_files'][f]
    else:
        rh.analyzed_source_file = "UNKNOWN"

    return rh.handle_results()
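
A hedged aside: the statistics returned by this variant could be aggregated over many plist files with a small standalone helper. The sketch below assumes each call returns a plain dict of checker-name-to-count values (an assumption about the result handler's output, not documented behaviour); summarize_statistics and the commented-out driver values are hypothetical.

import collections

def summarize_statistics(per_file_stats):
    # Sum per-checker report counts over all parsed plist files.
    # Assumes each item is a dict mapping checker names to counts.
    total = collections.Counter()
    for stats in per_file_stats:
        total.update(stats)
    return dict(total)

# Hypothetical driver; parse() and its arguments come from the surrounding
# CodeChecker module and are not defined here.
# all_stats = [parse(f, context, metadata_dict, suppress_handler, steps)
#              for f in plist_files]
# print(summarize_statistics(all_stats))
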
Example #2
def parse(f, context, metadata_dict, suppress_handler, steps):
    """
    Prints the results in the given file to the standard output in a human-
    readable format.
    """

    if not f.endswith(".plist"):
        LOG.info("Skipping input file '" + f + "' as it is not a plist.")
        return

    LOG.debug("Parsing input file '" + f + "'")

    buildaction = build_action.BuildAction()
    if os.path.basename(f).startswith("clangsa_"):
        buildaction.analyzer_type = analyzer_types.CLANG_SA
    elif os.path.basename(f).startswith("clang-tidy_"):
        buildaction.analyzer_type = analyzer_types.CLANG_TIDY

    rh = analyzer_types.construct_parse_handler(buildaction, f,
                                                context.severity_map,
                                                suppress_handler, steps)

    # Set some variables of the result handler to use the saved file.
    rh.analyzer_returncode = 0
    rh.analyzer_result_file = f
    rh.analyzer_cmd = ""

    rh.analyzed_source_file = "UNKNOWN"
    base_f = os.path.basename(f)
    if 'result_source_files' in metadata_dict and \
            base_f in metadata_dict['result_source_files']:
        rh.analyzed_source_file = \
            metadata_dict['result_source_files'][base_f]

    rh.handle_results()
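
The variant above derives the analyzer type from the plist file name prefix. Below is a minimal, self-contained sketch of that idea; the ANALYZER_PREFIXES table and guess_analyzer_type helper are hypothetical, and plain strings stand in for the analyzer_types.CLANG_SA / analyzer_types.CLANG_TIDY constants used by the real code.

import os

# Hypothetical prefix table standing in for the analyzer_types constants.
ANALYZER_PREFIXES = {
    "clangsa_": "clangsa",
    "clang-tidy_": "clang-tidy",
}

def guess_analyzer_type(plist_path):
    # Return the analyzer name matching the plist file name prefix,
    # or None if the prefix is not recognized.
    base = os.path.basename(plist_path)
    for prefix, analyzer in ANALYZER_PREFIXES.items():
        if base.startswith(prefix):
            return analyzer
    return None

print(guess_analyzer_type("clangsa_main.cpp_abc123.plist"))  # clangsa
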
Example #3
def consume_plist(item):
    plist, args, context = item
    LOG.info('Consuming ' + plist)

    action = build_action.BuildAction()
    action.analyzer_type = analyzer_types.CLANG_SA
    action.original_command = 'Imported from PList directly'

    rh = analyzer_types.construct_result_handler(args, action, context.run_id,
                                                 args.directory,
                                                 context.severity_map, None,
                                                 None, not args.stdout)

    rh.analyzer_returncode = 0
    rh.buildaction.analyzer_type = 'Build action from plist'
    rh.buildaction.original_command = plist
    rh.analyzer_cmd = ''
    rh.analyzed_source_file = ''  # TODO: fill from plist.
    rh.result_file = os.path.join(args.directory, plist)
    rh.handle_results()
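
A sketch of how (plist, args, context) work items for this consume_plist() might be collected, assuming the report files sit directly in args.directory; collect_plist_items is a hypothetical helper and args/context are CodeChecker-specific objects that are only hinted at here.

import glob
import os

def collect_plist_items(directory, args, context):
    # Build (plist, args, context) tuples for every .plist file in the
    # directory; names are kept relative so consume_plist() can re-join
    # them with args.directory.
    plists = sorted(os.path.basename(p)
                    for p in glob.glob(os.path.join(directory, '*.plist')))
    return [(plist, args, context) for plist in plists]

# for item in collect_plist_items(args.directory, args, context):
#     consume_plist(item)
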
Example #4
def consume_plist(item):
    f, context, metadata_dict = item

    LOG.debug("Parsing input file '" + f + "'")

    if 'working_directory' in metadata_dict:
        os.chdir(metadata_dict['working_directory'])

    buildaction = build_action.BuildAction()
    if os.path.basename(f).startswith("clangsa_"):
        buildaction.analyzer_type = analyzer_types.CLANG_SA
    elif os.path.basename(f).startswith("clang-tidy_"):
        buildaction.analyzer_type = analyzer_types.CLANG_TIDY

    buildaction.original_command = "IMPORTED"

    rh = analyzer_types.construct_store_handler(buildaction, context.run_id,
                                                context.severity_map)

    rh.analyzer_returncode = 0
    rh.analyzer_cmd = ''

    rh.analyzed_source_file = "UNKNOWN"
    base_f = os.path.basename(f)
    if 'result_source_files' in metadata_dict and \
            base_f in metadata_dict['result_source_files']:
        rh.analyzed_source_file = \
            metadata_dict['result_source_files'][base_f]
    rh.analyzer_result_file = f

    if rh.analyzed_source_file == "UNKNOWN":
        LOG.info("Storing defects in input file '" + base_f + "'")
    else:
        LOG.info("Storing analysis results for file '" +
                 rh.analyzed_source_file + "'")
    rh.handle_results()
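
The analyzed-source-file lookup used above can also be written with nested dict.get() calls; the sketch below is an equivalent rewrite of that lookup, assuming 'result_source_files' maps plist basenames to source paths. lookup_analyzed_source and the sample metadata are hypothetical.

import os

def lookup_analyzed_source(metadata_dict, plist_path):
    # Same effect as the "UNKNOWN" default plus membership test above.
    base_f = os.path.basename(plist_path)
    return metadata_dict.get('result_source_files', {}).get(base_f, "UNKNOWN")

metadata = {'result_source_files':
            {'clangsa_main.cpp_1.plist': '/src/main.cpp'}}
print(lookup_analyzed_source(metadata, '/reports/clangsa_main.cpp_1.plist'))
print(lookup_analyzed_source(metadata, '/reports/other.plist'))  # UNKNOWN
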
Example #5
def parse_compile_commands_json(logfile, add_compiler_defaults=False):
    import json
    LOG.debug('parse_compile_commands_json: ' + str(add_compiler_defaults))

    actions = []
    filtered_build_actions = {}

    logfile.seek(0)
    data = json.load(logfile)

    compiler_defines = {}
    compiler_includes = {}

    counter = 0
    for entry in data:
        sourcefile = entry['file']

        if not os.path.isabs(sourcefile):
            # Newer versions of intercept-build may record the 'file' entry
            # in the JSON Compilation Database as a relative path.
            sourcefile = os.path.join(os.path.abspath(entry['directory']),
                                      sourcefile)

        lang = option_parser.get_language(sourcefile[sourcefile.rfind('.'):])

        if not lang:
            continue

        action = build_action.BuildAction(counter)
        if 'command' in entry:
            command = entry['command']

            # Old versions of intercept-build (confirmed for the one shipping
            # with upstream clang-5.0) escape differently:
            # -DVARIABLE="a b" becomes -DVARIABLE=\"a b\" in the output.
            # This would be mangled later on by option_parser, so it needs a
            # fix here. (This should be removed once we are sure no one uses
            # that intercept-build anymore!)
            if r'\"' in command:
                command = command.replace(r'\"', '"')
        elif 'arguments' in entry:
            # Newest versions of intercept-build create an argument vector
            # instead of a command string.
            command = ' '.join(entry['arguments'])
        else:
            raise KeyError("No valid 'command' or 'arguments' entry found!")
        results = option_parser.parse_options(command)

        action.original_command = command
        action.analyzer_options = results.compile_opts

        action.lang = results.lang
        action.target = results.arch

        # Store the compiler's built-in include paths and defines.
        if add_compiler_defaults and results.compiler:
            if results.compiler not in compiler_defines:
                # Fetch defaults from the compiler,
                # make sure we use the correct architecture.
                extra_opts = []
                for regex in COMPILE_OPTS_FWD_TO_DEFAULTS_GETTER:
                    pattern = re.compile(regex)
                    for comp_opt in action.analyzer_options:
                        if re.match(pattern, comp_opt):
                            extra_opts.append(comp_opt)

                compiler_defines[results.compiler] = \
                    get_compiler_defines(results.compiler, extra_opts)
                compiler_includes[results.compiler] = \
                    get_compiler_includes(results.compiler, extra_opts)
            action.compiler_defines = compiler_defines[results.compiler]
            action.compiler_includes = compiler_includes[results.compiler]

        if results.action == option_parser.ActionType.COMPILE or \
           results.action == option_parser.ActionType.LINK:
            action.skip = False

        # TODO: Check arch.
        action.directory = entry['directory']
        action.sources = sourcefile
        # Filter out duplicate compilation commands.
        unique_key = action.cmp_key
        if filtered_build_actions.get(unique_key) is None:
            filtered_build_actions[unique_key] = action

        del action
        counter += 1

    for _, ba in filtered_build_actions.items():
        actions.append(ba)

    return actions
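
For readers unfamiliar with the JSON Compilation Database format consumed above, here is a minimal, self-contained sketch of the same 'command' / 'arguments' handling. It uses only the standard library, skips the CodeChecker-specific option parsing, and load_compile_commands is a hypothetical helper name.

import json
import os

def load_compile_commands(path):
    # Yield (source_file, directory, command) triples from a
    # compile_commands.json file, accepting both the old 'command'
    # string form and the newer 'arguments' vector form.
    with open(path) as logfile:
        data = json.load(logfile)

    for entry in data:
        sourcefile = entry['file']
        if not os.path.isabs(sourcefile):
            # Relative 'file' entries are resolved against 'directory'.
            sourcefile = os.path.join(os.path.abspath(entry['directory']),
                                      sourcefile)

        if 'command' in entry:
            # Undo the escaping done by old intercept-build versions.
            command = entry['command'].replace(r'\"', '"')
        elif 'arguments' in entry:
            command = ' '.join(entry['arguments'])
        else:
            raise KeyError("No valid 'command' or 'arguments' entry found!")

        yield sourcefile, entry['directory'], command

# for src, cwd, cmd in load_compile_commands('compile_commands.json'):
#     print(src, cmd)
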
Example #6
def parse_compile_commands_json(logfile, parseLogOptions):
    """
    logfile: a JSON Compilation Database (compile_commands.json) file object.
    """

    output_path = parseLogOptions.output_path
    if output_path is not None:
        remove_file_if_exists(
            os.path.join(output_path, compiler_includes_dump_file))
        remove_file_if_exists(
            os.path.join(output_path, compiler_target_dump_file))

    actions = []
    filtered_build_actions = {}

    data = json.load(logfile)

    compiler_includes = {}
    compiler_target = {}

    counter = 0
    for entry in data:
        sourcefile = entry['file']

        if not os.path.isabs(sourcefile):
            # Newer versions of intercept-build may record the 'file' entry
            # in the JSON Compilation Database as a relative path.
            sourcefile = os.path.join(os.path.abspath(entry['directory']),
                                      sourcefile)

        lang = option_parser.get_language(sourcefile[sourcefile.rfind('.'):])

        if not lang:
            continue

        action = build_action.BuildAction(counter)
        if 'command' in entry:
            command = entry['command']

            # Old versions of intercept-build (confirmed for the one shipping
            # with upstream clang-5.0) escape differently:
            # -DVARIABLE="a b" becomes -DVARIABLE=\"a b\" in the output.
            # This would be mangled later on by option_parser, so it needs a
            # fix here. (This should be removed once we are sure no one uses
            # that intercept-build anymore!)
            if r'\"' in command:
                command = command.replace(r'\"', '"')
        elif 'arguments' in entry:
            # Newest versions of intercept-build create an argument vector
            # instead of a command string.
            command = ' '.join(entry['arguments'])
        else:
            raise KeyError("No valid 'command' or 'arguments' entry found!")
        results = option_parser.parse_options(command)

        action.original_command = command

        # If the original include directory cannot be found on the
        # filesystem, it may have been given relative to the working
        # directory recorded in the compilation database entry.
        compile_opts = results.compile_opts
        for i, opt in enumerate(compile_opts):
            if opt.startswith('-I'):
                inc_dir = opt[2:].strip()
                if not os.path.isdir(inc_dir):
                    compile_opts[i] = '-I' + \
                        os.path.join(entry['directory'], inc_dir)

        action.analyzer_options = compile_opts

        action.lang = results.lang
        action.target = results.arch
        action.output = results.output

        add_compiler_defaults = True

        # With gcc-toolchain a non-default compiler toolchain can be set.
        # Clang will search for include paths and libraries based on the
        # gcc-toolchain parameter.
        # Detecting extra include paths from the host compiler could
        # conflict with this.

        # For example, if the compiler in the compile command is clang
        # and gcc-toolchain is set, we will get the include paths for
        # clang and not for the compiler set in gcc-toolchain.
        # This can cause missing headers during the analysis.

        toolchain = gcc_toolchain.toolchain_in_args(action.analyzer_options)
        if toolchain:
            add_compiler_defaults = False

        # Store the compiler's built-in include paths and target.
        if add_compiler_defaults and results.compiler:
            if results.compiler not in compiler_includes:
                # Fetch defaults from the compiler,
                # make sure we use the correct architecture.
                extra_opts = []
                for regex in COMPILE_OPTS_FWD_TO_DEFAULTS_GETTER:
                    pattern = re.compile(regex)
                    for comp_opt in action.analyzer_options:
                        if re.match(pattern, comp_opt):
                            extra_opts.append(comp_opt)

                compiler_includes[results.compiler] = \
                    get_compiler_includes(parseLogOptions, results.compiler,
                                          results.lang, results.compile_opts,
                                          extra_opts)

            if results.compiler not in compiler_target:
                compiler_target[results.compiler] = \
                    get_compiler_target(parseLogOptions, results.compiler)

            action.compiler_includes = compiler_includes[results.compiler]
            action.target = compiler_target[results.compiler]

        if results.action != option_parser.ActionType.COMPILE:
            action.skip = True

        # TODO: Check arch.
        action.directory = entry['directory']
        action.sources = sourcefile
        # Filter out duplicate compilation commands.
        unique_key = action.cmp_key
        if filtered_build_actions.get(unique_key) is None:
            filtered_build_actions[unique_key] = action

        del action
        counter += 1

    for _, ba in filtered_build_actions.items():
        actions.append(ba)
    return actions
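
The -I fix-up in the middle of this variant can be isolated as a small helper; the sketch below assumes only combined '-I<dir>' options (not separate '-I dir' pairs) need rewriting, which matches what the code above handles, and fix_relative_includes is a hypothetical name.

import os

def fix_relative_includes(compile_opts, directory):
    # Rewrite '-I<dir>' options whose directory does not exist so that
    # they are resolved against the compile action's working directory.
    fixed = list(compile_opts)
    for i, opt in enumerate(fixed):
        if opt.startswith('-I'):
            inc_dir = opt[2:].strip()
            if not os.path.isdir(inc_dir):
                fixed[i] = '-I' + os.path.join(directory, inc_dir)
    return fixed

# Yields ['-I/project/build/include', '-O2'] when ./include does not exist.
print(fix_relative_includes(['-Iinclude', '-O2'], '/project/build'))
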
Example #7
def parse_compile_commands_json(logfile, add_compiler_defaults=False):
    import json
    LOG.debug('parse_compile_commands_json: ' + str(add_compiler_defaults))

    actions = []
    filtered_build_actions = {}

    logfile.seek(0)
    data = json.load(logfile)

    compiler_defines = {}
    compiler_includes = {}

    counter = 0
    for entry in data:
        sourcefile = entry['file']
        lang = option_parser.get_language(sourcefile[sourcefile.rfind('.'):])

        if not lang:
            continue

        action = build_action.BuildAction(counter)

        command = entry['command']
        results = option_parser.parse_options(command)

        action.original_command = command
        action.analyzer_options = results.compile_opts
        action.lang = results.lang
        action.target = results.arch

        # Store the compiler's built-in include paths and defines.
        if add_compiler_defaults and results.compiler:
            if results.compiler not in compiler_defines:
                compiler_defines[results.compiler] = \
                    get_compiler_defines(results.compiler)
                compiler_includes[results.compiler] = \
                    get_compiler_includes(results.compiler)
            action.compiler_defines = compiler_defines[results.compiler]
            action.compiler_includes = compiler_includes[results.compiler]

        if results.action == option_parser.ActionType.COMPILE or \
           results.action == option_parser.ActionType.LINK:
            action.skip = False

        # TODO: check arch.
        action.directory = entry['directory']
        action.sources = sourcefile
        # Filter out duplicate compilation commands.
        unique_key = action.cmp_key
        if filtered_build_actions.get(unique_key) is None:
            filtered_build_actions[unique_key] = action

        del action
        counter += 1

    for _, ba in filtered_build_actions.items():
        actions.append(ba)

    return actions
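
All three parse_compile_commands_json variants deduplicate build actions through a dict keyed on cmp_key. The sketch below shows that pattern on its own, with a hypothetical key function standing in for the BuildAction.cmp_key property defined elsewhere in CodeChecker.

def filter_duplicates(actions, key_fn):
    # Keep only the first action seen for each comparison key, mirroring
    # the filtered_build_actions dict used in the functions above.
    filtered = {}
    for action in actions:
        unique_key = key_fn(action)
        if unique_key not in filtered:
            filtered[unique_key] = action
    return list(filtered.values())

# Hypothetical stand-in data and key function.
actions = [{'cmd': 'gcc -c a.c', 'src': 'a.c'},
           {'cmd': 'gcc -c a.c', 'src': 'a.c'},
           {'cmd': 'gcc -c b.c', 'src': 'b.c'}]
print(filter_duplicates(actions, key_fn=lambda a: (a['cmd'], a['src'])))
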