Example #1
    def test_new_ldlogger(self):
        """
        Test log file parsing escape behaviour with the post-#631 LD-LOGGER.
        """
        logfile = os.path.join(self.__test_files, "ldlogger-new.json")

        # LD-LOGGERS after http://github.com/Ericsson/codechecker/pull/631
        # now properly log the multiword arguments. When these are parsed by
        # the log_parser, the define's value will be passed to the analyzer.
        #
        # Logfile contains -DVARIABLE="some value"
        # and --target=x86_64-linux-gnu.

        build_action = log_parser.parse_log(load_json_or_empty(logfile))[0]

        self.assertEqual(build_action.source, r'/tmp/a.cpp')
        self.assertEqual(len(build_action.analyzer_options), 1)
        self.assertTrue(len(build_action.target) > 0)
        self.assertEqual(build_action.analyzer_options[0],
                         r'-DVARIABLE="\"some value"\"')

        # Test source file with spaces.
        logfile = os.path.join(self.__test_files, "ldlogger-new-space.json")

        build_action = log_parser.parse_log(load_json_or_empty(logfile))[0]

        self.assertEqual(build_action.source, r'/tmp/a\ b.cpp')
        self.assertEqual(build_action.lang, 'c++')
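
For context, the command-string style used by these logfiles can be pictured as a one-entry compilation database like the Python literal below. This is a hypothetical illustration: the define and the target flag come from the comment above, while the directory, compiler and exact quoting are assumptions, not the actual ldlogger-new.json contents.

# Hypothetical entry in the command-string style; the real fixture may differ.
ldlogger_new_style = [{
    "directory": "/tmp",
    "command": 'g++ -DVARIABLE="some value" --target=x86_64-linux-gnu'
               ' -c /tmp/a.cpp',
    "file": "/tmp/a.cpp"
}]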
Example #2
    def test_old_intercept_build(self):
        """
        Test log file parsing escape behaviour with clang-5.0 intercept-build.
        """
        logfile = os.path.join(self.__test_files, "intercept-old.json")

        # Scan-build-py shipping with clang-5.0 makes a logfile that contains:
        # -DVARIABLE=\"some value\" and --target=x86_64-linux-gnu
        #
        # The define is passed to the analyzer properly.

        build_action = log_parser.parse_log(load_json_or_empty(logfile))[0]

        self.assertEqual(build_action.source, r'/tmp/a.cpp')
        self.assertEqual(len(build_action.analyzer_options), 1)
        self.assertTrue(len(build_action.target) > 0)
        self.assertEqual(build_action.analyzer_options[0],
                         r'-DVARIABLE="\"some value"\"')

        # Test source file with spaces.
        logfile = os.path.join(self.__test_files, "intercept-old-space.json")

        build_action = log_parser.parse_log(load_json_or_empty(logfile))[0]

        self.assertEqual(build_action.source, r'/tmp/a\ b.cpp')
        self.assertEqual(build_action.lang, 'c++')
Example #3
    def test_new_intercept_build(self):
        """
        Test log file parsing escapes with upstream (GitHub) intercept-build.
        """
        logfile = os.path.join(self.__test_files, "intercept-new.json")

        # Upstream scan-build-py creates an argument vector, as opposed to a
        # command string. This argument vector contains the define as one of
        # its elements, in the following format:
        # -DVARIABLE=\"some value\"
        # and the target triplet, e.g.:
        # --target=x86_64-linux-gnu
        #
        # The define is passed to the analyzer properly.

        build_action = log_parser.parse_log(load_json_or_empty(logfile))[0]

        self.assertEqual(build_action.source, r'/tmp/a.cpp')
        self.assertEqual(len(build_action.analyzer_options), 1)
        self.assertTrue(len(build_action.target) > 0)
        self.assertEqual(build_action.analyzer_options[0],
                         r'-DVARIABLE="\"some value"\"')

        # Test source file with spaces.
        logfile = os.path.join(self.__test_files, "intercept-new-space.json")

        build_action = log_parser.parse_log(load_json_or_empty(logfile))[0]

        self.assertEqual(build_action.source, r'/tmp/a\ b.cpp')
        self.assertEqual(build_action.lang, 'c++')
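
Unlike the command-string logs above, each entry here carries an argument vector (the "arguments" field of the JSON compilation database format). Below is a hypothetical entry, with the define element written as it is displayed in the comment above; the directory, compiler and source path are assumptions for illustration.

# Hypothetical argument-vector entry; the real intercept-new.json fixture
# may differ in detail.
intercept_new_style = [{
    "directory": "/tmp",
    "arguments": ["g++", r'-DVARIABLE=\"some value\"',
                  "--target=x86_64-linux-gnu", "-c", "/tmp/a.cpp"],
    "file": "/tmp/a.cpp"
}]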
Example #4
    def test_omit_preproc(self):
        """
        Compiler preprocessor actions should be omitted.
        """
        preprocessor_actions = [{
            "directory": "/tmp",
            "command": "g++ /tmp/a.cpp -c /tmp/a.cpp",
            "file": "/tmp/a.cpp"
        }, {
            "directory": "/tmp",
            "command": "g++ /tmp/a.cpp -E /tmp/a.cpp",
            "file": "/tmp/a.cpp"
        }, {
            "directory": "/tmp",
            "command": "g++ /tmp/a.cpp -MT /tmp/a.cpp",
            "file": "/tmp/a.cpp"
        }, {
            "directory": "/tmp",
            "command": "g++ /tmp/a.cpp -MM /tmp/a.cpp",
            "file": "/tmp/a.cpp"
        }, {
            "directory": "/tmp",
            "command": "g++ /tmp/a.cpp -MF /tmp/a.cpp",
            "file": "/tmp/a.cpp"
        }, {
            "directory": "/tmp",
            "command": "g++ /tmp/a.cpp -M /tmp/a.cpp",
            "file": "/tmp/a.cpp"
        }]

        build_actions = log_parser.parse_log(preprocessor_actions)
        self.assertEqual(len(build_actions), 1)
        self.assertNotIn('-M', build_actions[0].original_command)
        self.assertNotIn('-E', build_actions[0].original_command)
        self.assertIn('-c', build_actions[0].original_command)
Example #5
    def test_analyzer_exec_double_quote(self):
        """
        Test the process execution by the analyzer.
        If the escaping fails, the source file will not compile.
        """
        compile_cmd = self.compiler + \
            ' -DDEBUG \'-DMYPATH="/this/some/path/"\''

        comp_actions = log_parser.parse_log(self.__get_cmp_json(compile_cmd))

        for comp_action in comp_actions:
            cmd = [self.compiler]
            cmd.extend(comp_action.analyzer_options)
            cmd.append(str(comp_action.source))
            cwd = comp_action.directory

            print(cmd)
            print(cwd)

            ret_val, stdout, stderr = analyzer_base.SourceAnalyzer \
                .run_proc(' '.join(cmd), cwd=cwd)

            print(stdout)
            print(stderr)
            self.assertEqual(ret_val, 0)
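
__get_cmp_json is a private helper of the test class and is not shown in this excerpt. Below is a minimal sketch of what such a helper might return, assuming it merely wraps the given compile command in a one-entry, command-string style compilation database; the directory, source path and appended -c flag are assumptions, not the real helper's behaviour.

# Hypothetical stand-in for the test class's __get_cmp_json helper; the real
# helper may build the entry differently.
def get_cmp_json_sketch(compile_cmd, directory="/tmp", source="/tmp/a.cpp"):
    return [{
        "directory": directory,
        "command": compile_cmd + " -c " + source,
        "file": source
    }]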
Example #6
    def test_analyzer_ansic_double_quote(self):
        """
        Test the process execution by the analyzer with ANSI-C-like
        escape characters (\") in it.
        If the escaping fails, the source file will not compile.
        """
        compile_cmd = self.compiler + ''' '-DMYPATH=\"/some/other/path\"' '''
        comp_actions = log_parser.parse_log(self.__get_cmp_json(compile_cmd))

        for comp_action in comp_actions:
            cmd = [self.compiler]
            cmd.extend(comp_action.analyzer_options)
            cmd.append(str(comp_action.source))
            cwd = comp_action.directory

            print(cmd)
            print(cwd)

            ret_val, stdout, stderr = analyzer_base.SourceAnalyzer \
                .run_proc(' '.join(cmd), cwd=cwd)

            print(stdout)
            print(stderr)

            self.assertEqual(ret_val, 0)
Example #7
    def test_include_rel_to_abs(self):
        """
        Test working directory prepending to relative include paths.
        """
        logfile = os.path.join(self.__test_files, "include.json")

        build_action = log_parser.parse_log(load_json_or_empty(logfile))[0]

        self.assertEqual(len(build_action.analyzer_options), 4)
        self.assertEqual(build_action.analyzer_options[0], '-I')
        self.assertEqual(build_action.analyzer_options[1], '/include')
        self.assertEqual(build_action.analyzer_options[2], '-I/include')
        self.assertEqual(build_action.analyzer_options[3], '-I/tmp')
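
The behaviour exercised here, prepending the action's working directory to relative include paths, can be illustrated with a small standalone sketch; this is only the idea, not the actual log_parser implementation.

import os

def absolutize_includes(options, directory):
    """Join relative -I paths with the working directory (illustrative)."""
    absolutized = []
    next_is_path = False
    for opt in options:
        if next_is_path:
            absolutized.append(os.path.normpath(os.path.join(directory, opt)))
            next_is_path = False
        elif opt == '-I':
            absolutized.append(opt)
            next_is_path = True
        elif opt.startswith('-I'):
            absolutized.append(
                '-I' + os.path.normpath(os.path.join(directory, opt[2:])))
        else:
            absolutized.append(opt)
    return absolutized

With directory '/tmp', a relative '-I.' becomes '-I/tmp', while absolute paths such as '/include' stay untouched, since os.path.join discards the prefix when the second argument is absolute.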
Example #8
    def test_keep_compile_and_dep(self):
        """ Keep the compile command if -MD is set.
        Dependency generation is done as a side effect of the compilation.
        """
        preprocessor_actions = [{
            "directory": "/tmp",
            "command": "g++ /tmp/a.cpp -MD /tmp/a.cpp",
            "file": "/tmp/a.cpp"
        }]

        build_actions = log_parser.parse_log(preprocessor_actions)
        self.assertEqual(len(build_actions), 1)
        self.assertIn('-MD', build_actions[0].original_command)
Example #9
    def test_omit_dep_with_e(self):
        """ Skip the compile command if -MD is set together with -E. """

        preprocessor_actions = [{
            "directory": "/tmp",
            "command": "g++ /tmp/a.cpp -MD -E /tmp/a.cpp",
            "file": "/tmp/a.cpp"
        }, {
            "directory": "/tmp",
            "command": "g++ /tmp/a.cpp -E -MD /tmp/a.cpp",
            "file": "/tmp/a.cpp"
        }]

        build_actions = log_parser.parse_log(preprocessor_actions)
        self.assertEqual(len(build_actions), 0)
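
Taken together, test_omit_preproc, test_keep_compile_and_dep and test_omit_dep_with_e pin down when an action is dropped. The rule they exercise can be summarised in a small predicate, sketched here under the assumption that flag presence alone decides; the real log_parser may implement it differently.

import shlex

def is_preprocess_only(command):
    """Illustrative skip rule: drop pure preprocessing or dependency runs."""
    opts = shlex.split(command)
    if '-E' in opts:
        # Plain preprocessing is skipped, even when combined with -MD.
        return True
    # Pure dependency-generation flags are skipped too; -MD alone is kept,
    # because the compilation still happens as a side effect.
    return any(opt in ('-M', '-MM', '-MF', '-MT') for opt in opts)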
Example #10
    def test_old_ldlogger(self):
        """
        Test log file parsing escape behaviour with pre-2017 Q2 LD-LOGGER.
        """
        logfile = os.path.join(self.__test_files, "ldlogger-old.json")

        # LD-LOGGER before http://github.com/Ericsson/codechecker/pull/631
        # used an escape mechanism that, once the log is parsed via shlex,
        # caused arguments containing multiword string literals to be treated
        # as a "file" rather than a compile option. Such arguments were then
        # ignored by the command builder, reducing analysis accuracy because
        # the defines were lost.
        #
        # Logfile contains "-DVARIABLE="some value"".
        #
        # There is no good way to convert back and forth in log_parser or
        # option_parser, so for now we settle for a non-failing stalemate:
        # the define is treated as a file and ignored.

        build_action = log_parser.parse_log(load_json_or_empty(logfile))[0]

        self.assertEqual(build_action.source, r'/tmp/a.cpp')
        self.assertEqual(len(build_action.analyzer_options), 0)
Example #11
def main(args):
    """
    Perform analysis on the given logfiles and store the results in a machine-
    readable format.
    """
    logger.setup_logger(args.verbose if 'verbose' in args else None)

    if len(args.logfile) != 1:
        LOG.warning("Only one log file can be processed right now!")
        sys.exit(1)

    args.output_path = os.path.abspath(args.output_path)
    if os.path.exists(args.output_path) and \
            not os.path.isdir(args.output_path):
        LOG.error("The given output path is not a directory: " +
                  args.output_path)
        sys.exit(1)

    if 'enable_all' in args:
        LOG.info("'--enable-all' was supplied for this analysis.")

    # We clear the output directory in the following cases.
    ctu_dir = os.path.join(args.output_path, 'ctu-dir')
    if 'ctu_phases' in args and args.ctu_phases[0] and \
            os.path.isdir(ctu_dir):
        # Clear the CTU-dir if the user turned on the collection phase.
        LOG.debug("Previous CTU contents have been deleted.")
        shutil.rmtree(ctu_dir)

    if 'clean' in args and os.path.isdir(args.output_path):
        LOG.info(
            "Previous analysis results in '%s' have been removed, "
            "overwriting with current result", args.output_path)
        shutil.rmtree(args.output_path)

    if not os.path.exists(args.output_path):
        os.makedirs(args.output_path)

    LOG.debug("args: " + str(args))
    LOG.debug("Output will be stored to: '" + args.output_path + "'")

    # Process the skip list if present.
    skip_handler = __get_skip_handler(args)

    # Parse the JSON CCDBs and retrieve the compile commands.
    actions = []
    for log_file in args.logfile:
        if not os.path.exists(log_file):
            LOG.error("The specified logfile '%s' does not exist!", log_file)
            continue

        actions += log_parser.parse_log(
            load_json_or_empty(log_file), skip_handler,
            os.path.join(args.output_path, 'compiler_info.json'))
    if not actions:
        LOG.info("None of the specified build log files contained "
                 "valid compilation commands. No analysis needed...")
        sys.exit(1)

    context = analyzer_context.get_context()
    metadata = {
        'action_num': len(actions),
        'command': sys.argv,
        'versions': {
            'codechecker':
            "{0} ({1})".format(context.package_git_tag,
                               context.package_git_hash)
        },
        'working_directory': os.getcwd(),
        'output_path': args.output_path,
        'result_source_files': {}
    }

    if 'name' in args:
        metadata['name'] = args.name

    # Update metadata dictionary with old values.
    metadata_file = os.path.join(args.output_path, 'metadata.json')
    if os.path.exists(metadata_file):
        metadata_prev = load_json_or_empty(metadata_file)
        metadata['result_source_files'] = \
            metadata_prev['result_source_files']

    analyzer.perform_analysis(args, skip_handler, context, actions, metadata)

    __update_skip_file(args)

    LOG.debug("Analysis metadata write to '%s'", metadata_file)
    with open(metadata_file, 'w') as metafile:
        json.dump(metadata, metafile)

    # WARN: store command will search for this file!!!!
    compile_cmd_json = os.path.join(args.output_path, 'compile_cmd.json')
    try:
        source = os.path.abspath(args.logfile[0])
        target = os.path.abspath(compile_cmd_json)

        if source != target:
            shutil.copyfile(source, target)
    except shutil.Error:
        LOG.debug("Compilation database JSON file is the same.")
    except Exception:
        LOG.debug("Copying compilation database JSON file failed.")

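    # Analyzer statistics collection is optional; any exception it raises is
    # silently ignored.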
    try:
        from codechecker import analyzer_statistics
        analyzer_statistics.collect(metadata, "analyze")
    except Exception:
        pass