Example #1
def __get_skip_handler(args):
    try:
        if args.skipfile:
            LOG.debug_analyzer("Creating skiplist handler.")
            return skiplist_handler.SkipListHandler(args.skipfile)
    except AttributeError:
        LOG.debug_analyzer('Skip file was not set in the command line')
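A minimal usage sketch for the variant above ('args' is built with argparse
here purely for illustration; LOG and skiplist_handler come from the
surrounding CodeChecker module):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--skipfile', default=None)
args = parser.parse_args(['--skipfile', 'skip.list'])

# Returns a SkipListHandler built from the given path, or None when no
# skip file was set on 'args'.
handler = __get_skip_handler(args)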
Example #2
def __get_skip_handler(args):
    """
    Initialize and return a skiplist handler if
    there is a skip list file in the arguments.
    """
    try:
        if args.skipfile:
            LOG.debug_analyzer("Creating skiplist handler.")
            with open(args.skipfile) as skip_file:
                return skiplist_handler.SkipListHandler(skip_file.read())
    except AttributeError:
        LOG.debug_analyzer('Skip file was not set in the command line')
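This variant passes the file's contents to SkipListHandler instead of the
path, so a handler can also be built from rules assembled in memory. A rough
sketch, assuming the newline-separated rule format the call above implies
(the '+'/'-' prefixes follow CodeChecker's skip file syntax, where the first
matching line wins):

skip_rules = '\n'.join([
    '+*/third_party/keep/*',  # keep this subtree...
    '-*/third_party/*'        # ...but skip everything else under third_party
])
handler = skiplist_handler.SkipListHandler(skip_rules)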
Example #3
# Standard-library imports used below; LOG, host_check,
# generic_package_context, analyzer_env, SQLServer, client, util,
# skiplist_handler, consume_plist and __get_run_name come from the
# surrounding CodeChecker modules.
import json
import multiprocessing
import os
import sys
import tempfile


def main(args):
    """
    Store the defect results in the specified input list as bug reports in the
    database.
    """

    if not host_check.check_zlib():
        raise Exception("zlib is not available on the system!")

    # To ensure the help message prints the default folder properly,
    # the 'default' for 'args.input' is a string, not a list.
    # But the loop below needs a list.
    if isinstance(args.input, str):
        args.input = [args.input]

    if 'name' not in args:
        LOG.debug("Generating name for analysis...")
        generated = __get_run_name(args.input)
        if generated:
            setattr(args, 'name', generated)
        else:
            LOG.error("No suitable name was found in the inputs for the "
                      "analysis run. Please specify one by passing argument "
                      "--name run_name in the invocation.")
            sys.exit(2)  # argparse returns error code 2 for bad invocations.

    LOG.info("Storing analysis results for run '" + args.name + "'")

    if args.force:
        LOG.info("argument --force was specified: the run with name '" +
                 args.name + "' will be deleted.")

    context = generic_package_context.get_context()
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args, context.migration_root,
                                             check_env)

    conn_mgr = client.ConnectionManager(sql_server, 'localhost',
                                        util.get_free_port())

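    # Start the database server (initializing it if needed) and wait for
    # it to come up, then launch the report-handling server.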
    sql_server.start(context.db_version_info, wait_for_start=True, init=True)

    conn_mgr.start_report_server()

    original_cwd = os.getcwd()
    empty_metadata = {'working_directory': original_cwd}

    check_commands = []
    check_durations = []
    skip_handlers = []
    items = []
    for input_path in args.input:
        LOG.debug("Parsing input argument: '" + input_path + "'")

        if os.path.isfile(input_path):
            if not input_path.endswith(".plist"):
                continue

            items.append((input_path, context, empty_metadata))
        elif os.path.isdir(input_path):
            metadata_file = os.path.join(input_path, "metadata.json")
            if os.path.exists(metadata_file):
                with open(metadata_file, 'r') as metadata:
                    metadata_dict = json.load(metadata)
                    LOG.debug(metadata_dict)

                    if 'command' in metadata_dict:
                        check_commands.append(metadata_dict['command'])
                    if 'timestamps' in metadata_dict:
                        check_durations.append(
                            float(metadata_dict['timestamps']['end'] -
                                  metadata_dict['timestamps']['begin']))
                    if 'skip_data' in metadata_dict:
                        # Save previously stored skip data for sending to the
                        # database, to ensure skipped headers are actually
                        # skipped; the 'analyze' step can't do this.
                        handle, path = tempfile.mkstemp()
                        with os.fdopen(handle, 'w') as tmpf:
                            tmpf.write('\n'.join(metadata_dict['skip_data']))

                        skip_handlers.append(
                            skiplist_handler.SkipListHandler(path))
                        os.remove(path)
            else:
                metadata_dict = empty_metadata

            _, _, files = next(os.walk(input_path), ([], [], []))
            for f in files:
                if not f.endswith(".plist"):
                    continue

                items.append((os.path.join(input_path, f),
                              context, metadata_dict))

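    # Register the run in the database and store its related suppress
    # and skip information.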
    with client.get_connection() as connection:
        if len(check_commands) == 0:
            command = ' '.join(sys.argv)
        elif len(check_commands) == 1:
            command = ' '.join(check_commands[0])
        else:
            command = "multiple analyze calls: " +\
                      '; '.join([' '.join(com) for com in check_commands])

        context.run_id = connection.add_checker_run(command, args.name,
                                                    context.version,
                                                    args.force)

        # Clean previous suppress information.
        client.clean_suppress(connection, context.run_id)

        if 'suppress' in args:
            if not os.path.isfile(args.suppress):
                LOG.warning("Suppress file '" + args.suppress + "' given, but "
                            "it does not exist -- will not suppress anything.")
            else:
                client.send_suppress(context.run_id, connection,
                                     os.path.realpath(args.suppress))

        # Send previously collected skip information to the server.
        for skip_handler in skip_handlers:
            connection.add_skip_paths(context.run_id,
                                      skip_handler.get_skiplist())

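    # Store the collected plist files using a pool of worker processes.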
    pool = multiprocessing.Pool(args.jobs)

    try:
        pool.map_async(consume_plist, items, 1).get(float('inf'))
        pool.close()
    except Exception:
        pool.terminate()
        LOG.error("Storing the results failed.")
        raise  # CodeChecker.py is the invoker, it will handle this.
    finally:
        pool.join()
        os.chdir(original_cwd)

        with client.get_connection() as connection:
            connection.finish_checker_run(context.run_id)

            if len(check_durations) > 0:
                connection.set_run_duration(
                    context.run_id,
                    # Truncate the summed duration to whole seconds.
                    int(sum(check_durations)))
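
For reference, a minimal metadata.json that the directory branch above would
accept might look like the following; the keys mirror exactly what the loop
reads ('command', 'timestamps', 'skip_data'), and the concrete values are
illustrative only:

import json

sample_metadata = {
    'working_directory': '/home/user/project',
    'command': ['CodeChecker', 'analyze', 'compile_commands.json'],
    'timestamps': {'begin': 1500000000.0, 'end': 1500000042.5},
    'skip_data': ['+*/keep/*', '-*/third_party/*']
}

with open('metadata.json', 'w') as metadata_file:
    json.dump(sample_metadata, metadata_file, indent=2)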