Example #1
def run_check(args, actions, context):
    """
    Prepare:
    - analyzer config handlers
    - skiplist handling
    - analyzer severity levels

    Stores analysis related data to the database and starts the analysis.
    """

    if args.jobs <= 0:
        args.jobs = 1

    LOG.debug_analyzer("Checking supported analyzers.")
    enabled_analyzers = analyzer_types.check_supported_analyzers(
        args.analyzers, context)

    actions = prepare_actions(actions, enabled_analyzers)

    suppress_file = ''
    try:
        suppress_file = os.path.realpath(args.suppress)
    except AttributeError:
        LOG.debug_analyzer('Suppress file was not set in the command line.')

    # Create one skip list handler shared between the analysis manager workers.
    skip_handler = _get_skip_handler(args)

    with client.get_connection() as connection:
        context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                    args.name, context.version,
                                                    args.force)

        # Clean previous suppress information.
        client.clean_suppress(connection, context.run_id)

        if os.path.exists(suppress_file):
            client.send_suppress(context.run_id, connection, suppress_file)

        analyzer_config_map = analyzer_types.build_config_handlers(
            args, context, enabled_analyzers, connection)

        if skip_handler:
            connection.add_skip_paths(context.run_id,
                                      skip_handler.get_skiplist())

    __print_analyzer_version(context, analyzer_config_map)

    LOG.info("Static analysis is starting ...")
    start_time = time.time()

    analysis_manager.start_workers(args, actions, context, analyzer_config_map,
                                   skip_handler)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run(context.run_id)

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")
Example #2
def run_check(args, actions, context):
    """
    Prepare:
    - analyzer config handlers
    - skiplist handling
    - analyzer severity levels

    Stores analysis related data to the database and starts the analysis.
    """

    if args.jobs <= 0:
        args.jobs = 1

    LOG.debug_analyzer("Checking supported analyzers.")
    enabled_analyzers = analyzer_types.check_supported_analyzers(
        args.analyzers,
        context)

    # Load the severity map from the config file.
    LOG.debug_analyzer("Loading checker severity map.")
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()

        context.severity_map = json.loads(severity_config)

    actions = prepare_actions(actions, enabled_analyzers)

    analyzer_config_map = {}

    package_version = context.version['major'] + '.' + context.version['minor']

    suppress_file = ''
    try:
        suppress_file = os.path.realpath(args.suppress)
    except AttributeError:
        LOG.debug_analyzer('Suppress file was not set in the command line.')

    # Create one skip list handler shared between the analysis manager workers.
    skip_handler = None
    try:
        if args.skipfile:
            LOG.debug_analyzer("Creating skiplist handler.")
            skip_handler = skiplist_handler.SkipListHandler(args.skipfile)
    except AttributeError:
        LOG.debug_analyzer('Skip file was not set in the command line.')

    with client.get_connection() as connection:
        context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                    args.name,
                                                    package_version,
                                                    args.force)

        # clean previous suppress information
        client.clean_suppress(connection, context.run_id)

        if os.path.exists(suppress_file):
            client.send_suppress(context.run_id, connection, suppress_file)

        analyzer_config_map = analyzer_types.build_config_handlers(args,
                                                                   context,
                                                                   enabled_analyzers,
                                                                   connection)
        if skip_handler:
            connection.add_skip_paths(context.run_id,
                                      skip_handler.get_skiplist())

    LOG.info("Static analysis is starting ...")
    start_time = time.time()

    analysis_manager.start_workers(args,
                                   actions,
                                   context,
                                   analyzer_config_map,
                                   skip_handler)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run(context.run_id)

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")
Example #3
def run_check(args, actions, context):
    """
    Prepare:
    - analyzer config handlers
    - skiplist handling
    - analyzer severity levels

    Stores analysis related data to the database and starts the analysis.
    """

    if args.jobs <= 0:
        args.jobs = 1

    LOG.debug_analyzer("Checking supported analyzers.")
    enabled_analyzers = analyzer_types.check_supported_analyzers(
        args.analyzers, context)

    # Load the severity map from the config file.
    LOG.debug_analyzer("Loading checker severity map.")
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()

        context.severity_map = json.loads(severity_config)

    actions = prepare_actions(actions, enabled_analyzers)

    analyzer_config_map = {}

    package_version = context.version['major'] + '.' + context.version['minor']

    suppress_file = ''
    try:
        suppress_file = os.path.realpath(args.suppress)
    except AttributeError:
        LOG.debug_analyzer('Suppress file was not set in the command line.')

    # Create one skip list handler shared between the analysis manager workers.
    skip_handler = None
    try:
        if args.skipfile:
            LOG.debug_analyzer("Creating skiplist handler.")
            skip_handler = skiplist_handler.SkipListHandler(args.skipfile)
    except AttributeError:
        LOG.debug_analyzer('Skip file was not set in the command line.')

    with client.get_connection() as connection:
        context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                    args.name, package_version,
                                                    args.force)

        # clean previous suppress information
        client.clean_suppress(connection, context.run_id)

        if os.path.exists(suppress_file):
            client.send_suppress(context.run_id, connection, suppress_file)

        analyzer_config_map = analyzer_types.build_config_handlers(
            args, context, enabled_analyzers, connection)
        if skip_handler:
            connection.add_skip_paths(context.run_id,
                                      skip_handler.get_skiplist())

    LOG.info("Static analysis is starting ...")
    start_time = time.time()

    analysis_manager.start_workers(args, actions, context, analyzer_config_map,
                                   skip_handler)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run(context.run_id)

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")
Example #4
def handle_check(args):
    """ Check mode. """

    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    args.workspace = os.path.realpath(args.workspace)
    if not os.path.isdir(args.workspace):
        os.mkdir(args.workspace)

    log_file = ""
    if args.logfile:
        log_file = os.path.realpath(args.logfile)
        if not os.path.exists(args.logfile):
            LOG.info("Log file does not exists.")
            return

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # Load the severity map from the config file.
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()

        context.severity_map = json.loads(severity_config)

    if args.command:
        # Check if the logger binary exists.
        if not os.path.isfile(context.path_logger_bin):
            LOG.debug('Logger binary not found! Required for logging.')
            sys.exit(1)

        # Check if the logger lib exists.
        if not os.path.exists(context.path_logger_lib):
            LOG.debug('Logger library directory not found! '
                      'Libs are required for logging.')
            sys.exit(1)

        log_file = os.path.join(context.codechecker_workspace,
                                context.build_log_file_name)
        if os.path.exists(log_file):
            os.remove(log_file)
        open(log_file, 'a').close()  # Same as Linux's touch.
        perform_build_command(log_file, args.command, context)

    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    setup_connection_manager_db(args)
    client.ConnectionManager.port = util.get_free_port()

    if args.jobs <= 0:
        args.jobs = 1

    package_version = context.version['major'] + '.' + context.version['minor']
    suppress_file = os.path.join(args.workspace, package_version) \
                            if not args.suppress \
                            else os.path.realpath(args.suppress)

    send_suppress = False
    if os.path.exists(suppress_file):
        send_suppress = True

    client.ConnectionManager.run_env = check_env

    client.ConnectionManager.start_server(args.dbname, context)

    LOG.debug("Checker server started.")

    with client.get_connection() as connection:
        try:
            context.run_id = connection.add_checker_run(
                ' '.join(sys.argv), args.name, package_version, args.update)
        except shared.ttypes.RequestFailed as thrift_ex:
            if 'violates unique constraint "runs_name_key"' \
                    not in thrift_ex.message:
                # The problem was not the unique run name constraint.
                raise
            else:
                LOG.info("Name was already used in the database. Please "
                         "choose another unique name for checking.")
                sys.exit(1)

        if send_suppress:
            client.send_suppress(connection, suppress_file)

        # static_analyzer.clean = args.clean
        if args.clean:
            # Clean up previous results.
            LOG.debug("Cleaning previous plist files in " +
                      context.codechecker_workspace)
            plist_files = glob.glob(
                os.path.join(context.codechecker_workspace, '*.plist'))
            for pf in plist_files:
                os.remove(pf)

        report_output = os.path.join(context.codechecker_workspace,
                                     context.report_output_dir_name)
        if not os.path.exists(report_output):
            os.mkdir(report_output)

        static_analyzer = analyzer.StaticAnalyzer(context)
        static_analyzer.workspace = report_output

        # First, add checkers from the config file.
        static_analyzer.checkers = context.default_checkers

        # Add user-defined checkers.
        try:
            static_analyzer.checkers = args.ordered_checker_args
        except AttributeError:
            LOG.debug('No checkers were defined in the command line.')

        if args.configfile:
            static_analyzer.add_config(connection, args.configfile)
        # else:
            # add default config from package
            # static_analyzer.add_config(connection, context.checkers_config_file)

        if args.skipfile:
            static_analyzer.add_skip(connection,
                                     os.path.realpath(args.skipfile))

    LOG.info("Static analysis is starting..")
    start_time = time.time()

    LOG.debug("Starting workers...")
    start_workers(static_analyzer, actions, args.jobs, context)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run()

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")
    LOG.info("Analysis has finished.")
Example #5
def handle_check(args):
    """ Check mode. """

    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    args.workspace = os.path.realpath(args.workspace)
    if not os.path.isdir(args.workspace):
        os.mkdir(args.workspace)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # load severity map from config file
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()

        context.severity_map = json.loads(severity_config)

    log_file = _check_generate_log_file(args, context)
    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    conn_mgr = client.ConnectionManager(sql_server, 'localhost',
                                        util.get_free_port())

    if args.jobs <= 0:
        args.jobs = 1

    package_version = context.version['major'] + '.' + context.version['minor']
    suppress_file = os.path.join(args.workspace, package_version) \
                       if not args.suppress \
                       else os.path.realpath(args.suppress)

    send_suppress = False
    if os.path.exists(suppress_file):
        send_suppress = True

    sql_server.start(wait_for_start=True, init=True)
    conn_mgr.start_report_server(context.db_version_info)

    LOG.debug("Checker server started.")

    with client.get_connection() as connection:
        try:
            context.run_id = connection.add_checker_run(
                ' '.join(sys.argv), args.name, package_version, args.update)
        except shared.ttypes.RequestFailed as thrift_ex:
            if 'violates unique constraint "uq_runs_name"' \
                    not in thrift_ex.message:
                # The problem was not the unique run name constraint.
                raise
            else:
                LOG.info("Name was already used in the database. Please "
                         "choose another unique name for checking.")
                sys.exit(1)

        if args.update:
            # clean previous suppress information
            client.clean_suppress(connection, context.run_id)

        if send_suppress:
            client.send_suppress(connection, suppress_file)

        report_output = os.path.join(context.codechecker_workspace,
                                     context.report_output_dir_name)
        if not os.path.exists(report_output):
            os.mkdir(report_output)

        static_analyzer = analyzer.StaticAnalyzer(context)
        static_analyzer.workspace = report_output

        # First, add checkers from the config file.
        static_analyzer.checkers = context.default_checkers

        # Add user-defined checkers.
        try:
            static_analyzer.checkers = args.ordered_checker_args
        except AttributeError:
            LOG.debug('No checkers were defined in the command line.')

        if args.configfile:
            static_analyzer.add_config(connection, args.configfile)
        # else:
            # add default config from package
            # static_analyzer.add_config(connection, context.checkers_config_file)

        if args.skipfile:
            static_analyzer.add_skip(connection,
                                     os.path.realpath(args.skipfile))

    LOG.info("Static analysis is starting..")
    start_time = time.time()

    LOG.debug("Starting workers...")
    start_workers(static_analyzer, actions, args.jobs, context)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run()

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")

    if not args.keep_tmp:
        LOG.debug("Removing plist files in " +
                  context.codechecker_workspace)
        plist_files = glob.glob(os.path.join(report_output, '*.plist'))
        for pf in plist_files:
            os.remove(pf)

    LOG.info("Analysis has finished.")