def check_supported_analyzers(analyzers, context):
    """
    Check if the selected analyzers are supported.

    Exits with an error for an analyzer name that is not supported at
    all; analyzers whose binary is missing or cannot be run are only
    logged and left out of the result.

    :param analyzers: iterable of analyzer names selected by the user.
    :param context: package context providing env extras and binaries.
    :returns: set of analyzer names which are available to run.
    """
    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    analyzer_binaries = context.analyzer_binaries

    enabled_analyzers = set()

    for analyzer_name in analyzers:
        if analyzer_name not in supported_analyzers:
            LOG.error('Unsupported analyzer ' + analyzer_name + ' !')
            sys.exit(1)

        # Get the analyzer binary to check if it can run.
        analyzer_bin = analyzer_binaries.get(analyzer_name)
        if not analyzer_bin:
            LOG.debug_analyzer('Failed to detect analyzer binary ' +
                               analyzer_name)
            # BUG FIX: skip the runnability check entirely when no
            # binary was detected; the old code still called
            # host_check.check_clang(None, check_env).
            continue

        if not host_check.check_clang(analyzer_bin, check_env):
            LOG.warning('Failed to run analyzer ' + analyzer_name + ' !')
            continue

        enabled_analyzers.add(analyzer_name)

    return enabled_analyzers
def handle_check(args):
    """
    Runs the original build and logs the buildactions.
    Based on the log runs the analysis.
    """
    # BUG FIX: initialize log_file so the finally block can always
    # reference it; previously an early exception raised NameError there.
    log_file = None
    try:
        if not host_check.check_zlib():
            sys.exit(1)

        args.workspace = os.path.abspath(args.workspace)
        if not os.path.isdir(args.workspace):
            os.mkdir(args.workspace)

        context = generic_package_context.get_context()
        context.codechecker_workspace = args.workspace
        context.db_username = args.dbusername

        log_file = build_manager.check_log_file(args, context)
        if not log_file:
            # BUG FIX: log_file is None/'' here; the old code
            # concatenated it into the message (TypeError for None).
            LOG.error("Failed to generate compilation command file.")
            sys.exit(1)

        actions = log_parser.parse_log(log_file,
                                       args.add_compiler_defaults)

        check_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        sql_server = SQLServer.from_cmdline_args(args,
                                                 context.codechecker_workspace,
                                                 context.migration_root,
                                                 check_env)

        conn_mgr = client.ConnectionManager(sql_server,
                                            'localhost',
                                            util.get_free_port())

        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)

        conn_mgr.start_report_server()

        LOG.debug("Checker server started.")

        analyzer.run_check(args, actions, context)

        LOG.info("Analysis has finished.")

        log_startserver_hint(args)

    except Exception as ex:
        LOG.error(ex)
        import traceback
        print(traceback.format_exc())
    finally:
        if not args.keep_tmp:
            if log_file:
                LOG.debug('Removing temporary log file: ' + log_file)
                os.remove(log_file)
def handle_debug(args):
    """
    Run the debug command on the build actions whose analysis failed
    for some reason.
    """
    context = generic_package_context.get_context()

    try:
        workspace = args.workspace
    except AttributeError:
        # args carried no workspace attribute; fall back to the default.
        workspace = util.get_default_workspace()

    context.codechecker_workspace = workspace
    context.db_username = args.dbusername

    environ = analyzer_env.get_check_env(context.path_env_extra,
                                         context.ld_lib_path_extra)

    server = SQLServer.from_cmdline_args(args,
                                         context.codechecker_workspace,
                                         context.migration_root,
                                         environ)
    server.start(context.db_version_info, wait_for_start=True, init=False)

    debug_reporter.debug(context, server.get_connection_string(), args.force)
def _do_quickcheck(args):
    """
    Implement the "quickcheck" command.

    For the arguments see the main function in CodeChecker.py. The args
    object must also carry a 'workspace' attribute (a directory path as
    a string); handle_quickcheck sets it before calling this function.
    """
    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace

    environ = analyzer_env.get_check_env(context.path_env_extra,
                                         context.ld_lib_path_extra)

    # Bail out early when the compiler cannot be executed at all.
    if not host_check.check_clang(context.compiler_bin, environ):
        sys.exit(1)

    # Load the severity map from the package config file when present.
    sev_map_path = context.checkers_severity_map_file
    if os.path.exists(sev_map_path):
        with open(sev_map_path, 'r') as sev_conf_file:
            context.severity_map = json.loads(sev_conf_file.read())

    log_file = _check_generate_log_file(args, context, silent=True)
    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    static_analyzer = analyzer.StaticAnalyzer(context)
    static_analyzer.workspace = args.workspace

    # Checkers from the package config first, then the user's overrides.
    static_analyzer.checkers = context.default_checkers
    try:
        static_analyzer.checkers = args.ordered_checker_args
    except AttributeError:
        LOG.debug('No checkers were defined in the command line')

    for action in actions:
        analyzer.run_quick_check(static_analyzer,
                                 action,
                                 print_steps=args.print_steps)
def handle_list_checkers(args):
    """
    List the checkers available in the supported analyzers and show
    which ones the package config enables (+) or disables (-) by default.
    """
    context = generic_package_context.get_context()
    analyzer_environment = analyzer_env.get_check_env(
        context.path_env_extra,
        context.ld_lib_path_extra)

    requested = args.analyzers
    if not requested:
        # Nothing selected: list checkers for all supported analyzers.
        requested = list(analyzer_types.supported_analyzers)

    enabled_analyzer_types = set()
    for ea in requested:
        if ea not in analyzer_types.supported_analyzers:
            LOG.info('Not supported analyzer ' + str(ea))
            sys.exit(1)
        enabled_analyzer_types.add(ea)

    analyzer_config_map = \
        analyzer_types.build_config_handlers(args,
                                             context,
                                             enabled_analyzer_types)

    for ea in requested:
        # Get the config.
        config_handler = analyzer_config_map.get(ea)
        source_analyzer = \
            analyzer_types.construct_analyzer_type(ea,
                                                   config_handler,
                                                   None)

        checkers = source_analyzer.get_analyzer_checkers(
            config_handler,
            analyzer_environment)

        default_checker_cfg = context.default_checkers_config.get(
            ea + '_checkers')

        analyzer_types.initialize_checkers(config_handler,
                                           checkers,
                                           default_checker_cfg)

        for checker_name, value in config_handler.checks().items():
            enabled, description = value
            marker = '+' if enabled else '-'
            print(' {0} {1:50} {2}'.format(marker,
                                           checker_name,
                                           description))
def debug(context, dbusername, dbhost, dbport, dbname, force):
    """
    Re-run each failed build action of the latest run under gdb and
    dump the resulting stack traces into the package dump directory.

    :param context: package context (env extras, dump_output_dir).
    :param dbusername: PostgreSQL user name.
    :param dbhost: PostgreSQL host.
    :param dbport: PostgreSQL port.
    :param dbname: PostgreSQL database name.
    :param force: when True, regenerate dump files that already exist.
    """
    try:
        connection_string = 'postgres://' + dbusername + '@' + dbhost + \
            ':' + str(dbport) + '/' + dbname
        engine = sqlalchemy.create_engine(connection_string)

        print(connection_string)
        session = sqlalchemy.orm.scoped_session(
            sqlalchemy.orm.sessionmaker(bind=engine))

        # Get latest run id
        last_run = session.query(Run).order_by(Run.id.desc()).first()

        # Get all failed actions (non-empty failure text) of that run.
        actions = session.query(BuildAction).filter(and_(
            BuildAction.run_id == last_run.id,
            sqlalchemy.sql.func.length(BuildAction.failure_txt) != 0))

        debug_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        crash_handler = analyzer_crash_handler.AnalyzerCrashHandler(
            context, debug_env)

        dumps_dir = context.dump_output_dir
        if not os.path.exists(dumps_dir):
            os.mkdir(dumps_dir)

        for action in actions:
            LOG.info('Processing action ' + str(action.id) + '.')
            debug_log_file = \
                os.path.join(dumps_dir,
                             get_dump_file_name(last_run.id, action.id))
            # Skip actions already dumped unless --force was given.
            if not force and os.path.exists(debug_log_file):
                LOG.info('This file already exists.')
                continue

            LOG.info('Generating stacktrace with gdb.')
            gdb_result = \
                crash_handler.get_crash_info(str(action.check_cmd).split())

            LOG.info('Writing gdb result to file.')
            with open(debug_log_file, 'w') as log_file:
                log_file.write(gdb_result)

        LOG.info('All new debug files are placed in ' + dumps_dir)

    except KeyboardInterrupt as kb_exc:
        LOG.error(str(kb_exc))
        sys.exit(1)
def handle_debug(args):
    """
    Start the database server and run the failure-debug reporter on it.
    """
    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    # NOTE(review): start() is called without db_version_info here,
    # unlike the sibling handlers — confirm against the SQLServer API.
    sql_server.start(wait_for_start=True, init=False)

    debug_reporter.debug(context,
                         sql_server.get_connection_string(),
                         args.force)
def __build_clangsa_config_handler(args, context):
    """
    Build the config handler for the clang static analyzer.
    Handle config options from the command line and config files.

    :returns: the populated ClangSAConfigHandler.
    """
    config_handler = config_handler_clangsa.ClangSAConfigHandler()
    config_handler.analyzer_plugins_dir = context.checker_plugin
    config_handler.analyzer_binary = context.analyzer_binaries.get(CLANG_SA)
    config_handler.compiler_resource_dirs = context.compiler_resource_dirs
    config_handler.compiler_sysroot = context.compiler_sysroot
    config_handler.system_includes = context.extra_system_includes
    config_handler.includes = context.extra_includes

    try:
        with open(args.clangsa_args_cfg_file, 'rb') as sa_cfg:
            # Expand $(ENV_VAR) references in the extra-arguments file.
            # BUG FIX: use a raw string for the pattern; the old
            # '\$\((.*?)\)' relied on invalid escape sequences.
            config_handler.analyzer_extra_arguments = \
                re.sub(r'\$\((.*?)\)',
                       __replace_env_var(args.clangsa_args_cfg_file),
                       sa_cfg.read().strip())
    except IOError as ioerr:
        LOG.debug_analyzer(ioerr)
    except AttributeError as aerr:
        # No clangsa arguments file was given in the command line.
        LOG.debug_analyzer(aerr)

    analyzer = construct_analyzer_type(CLANG_SA, config_handler, None)

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    checkers = analyzer.get_analyzer_checkers(config_handler, check_env)

    # Read the clang static analyzer checkers from the config file.
    # (The old comment wrongly said clang-tidy.)
    clang_sa_checkers = context.default_checkers_config.get(CLANG_SA +
                                                            '_checkers')
    try:
        cmdline_checkers = args.ordered_checkers
    except AttributeError:
        # BUG FIX: restored the missing space before the analyzer name.
        LOG.debug_analyzer('No checkers were defined in the command line '
                           'for ' + CLANG_SA)
        cmdline_checkers = None

    initialize_checkers(config_handler,
                        checkers,
                        clang_sa_checkers,
                        cmdline_checkers)

    return config_handler
def handle_plist(args):
    """
    Process already existing plist result files in parallel and store
    them into the database (or, with --stdout, print them instead).
    """
    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    # The database (and a run entry) is only needed when results are
    # actually stored, i.e. not in stdout mode.
    if not args.stdout:
        args.workspace = os.path.realpath(args.workspace)
        if not os.path.isdir(args.workspace):
            os.mkdir(args.workspace)

        check_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        sql_server = SQLServer.from_cmdline_args(args,
                                                 context.codechecker_workspace,
                                                 context.migration_root,
                                                 check_env)

        conn_mgr = client.ConnectionManager(sql_server,
                                            'localhost',
                                            util.get_free_port())

        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)

        conn_mgr.start_report_server()

        with client.get_connection() as connection:
            context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                        args.name,
                                                        context.version,
                                                        args.force)

    pool = multiprocessing.Pool(args.jobs)

    try:
        items = [(plist, args, context)
                 for plist in os.listdir(args.directory)]
        # get() with an effectively infinite timeout keeps the wait
        # interruptible (a plain get() would swallow KeyboardInterrupt).
        pool.map_async(consume_plist, items, 1).get(float('inf'))
        pool.close()
    except Exception:
        pool.terminate()
        raise
    finally:
        pool.join()

    if not args.stdout:
        log_startserver_hint(args)
def handle_debug(args):
    """
    Bring up PostgreSQL and generate gdb debug dumps for the build
    actions whose analysis failed.
    """
    setup_connection_manager_db(args)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    environ = analyzer_env.get_check_env(context.path_env_extra,
                                         context.ld_lib_path_extra)

    # Start the database and wait until it accepts connections.
    client.ConnectionManager.run_env = environ
    client.ConnectionManager.start_postgres(context)
    client.ConnectionManager.block_until_db_start_proc_free(context)

    debug_reporter.debug(context, args.dbusername, args.dbaddress,
                         args.dbport, args.dbname, args.force)
def __print_analyzer_version(context, analyzer_config_map):
    """
    Print the path and the version of the analyzer binary.
    """
    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    # Get the analyzer binaries from the config_map which
    # contains only the checked and available analyzers.
    for analyzer_name, analyzer_cfg in analyzer_config_map.items():
        LOG.info("Using analyzer:")
        analyzer_bin = analyzer_cfg.analyzer_binary
        print(analyzer_bin)
        version = [analyzer_bin, '--version']
        try:
            # BUG FIX: pass the argv list directly; the old
            # ' '.join + shlex.split round-trip split binary paths
            # containing spaces into separate arguments.
            subprocess.call(version, env=check_env)
        except OSError as oerr:
            LOG.warning("Failed to get analyzer version: " +
                        ' '.join(version))
            LOG.warning(oerr.strerror)
def __init__(self, context): self._context = context # Load all plugin from plugin directory plugin_dir = os.path.join(context.package_root, context.checker_plugin) self._plugins = [os.path.join(plugin_dir, f) for f in os.listdir(plugin_dir) if os.path.isfile(os.path.join(plugin_dir, f))] self._config = None self._skip = [] self._workspace = '' self._mode = 'plist-multi-file' self._env = analyzer_env.get_check_env(context.path_env_extra, context.ld_lib_path_extra) self._checkers_list = [] self._disabled_checkers = set() self._disabled_checkers = self._disabled_checkers.union( self._context.env_disabled_checkers) self._cmd = [] self._cmd.append(self._context.compiler_bin) # if logger.get_log_level() == logger.DEBUG: # self._cmd.append('-v') if len(self._context.compiler_resource_dirs) > 0: for inc_dir in self._context.compiler_resource_dirs: self._cmd.append('-resource-dir') self._cmd.append(inc_dir) self._cmd.append('-isystem') self._cmd.append(inc_dir) self._cmd.append('-c') # self._cmd.append('-Xclang') self._cmd.append('--analyze') # turn off clang hardcoded checkers list self._cmd.append('--analyzer-no-default-checks') for plugin in self._plugins: self._cmd.append("-Xclang") self._cmd.append("-load") self._cmd.append("-Xclang") self._cmd.append(plugin) if self._plugins: self._cmd.append('-Xclang') self._cmd.append('-plugin') self._cmd.append('-Xclang') self._cmd.append('checkercfg') self._cmd.append('-Xclang') self._cmd.append('-analyzer-opt-analyze-headers') self._cmd.append('-Xclang') self._cmd.append('-analyzer-output=' + self._mode) if self._context.compiler_sysroot: self._cmd.append('--sysroot') self._cmd.append(self._context.compiler_sysroot) for path in self._context.extra_system_includes: self._cmd.append('-isystem') self._cmd.append(path) for path in self._context.extra_includes: self._cmd.append('-I') self._cmd.append(path)
def debug(context, connection_string, force):
    """
    Re-run each failed build action of the latest run under gdb and
    write the collected information (build command hash, check command,
    failure text, gdb stack trace) into per-action dump files.

    :param context: package context (env extras, dump_output_dir).
    :param connection_string: database connection string for SQLAlchemy.
    :param force: when True, regenerate dump files that already exist.
    """
    try:
        engine = database_handler.SQLServer.create_engine(connection_string)
        session = sqlalchemy.orm.scoped_session(
            sqlalchemy.orm.sessionmaker(bind=engine))

        # Get latest run id.
        last_run = session.query(Run).order_by(Run.id.desc()).first()

        # Get all failed actions (non-empty failure text) of that run.
        actions = session.query(BuildAction).filter(and_(
            BuildAction.run_id == last_run.id,
            sqlalchemy.sql.func.length(BuildAction.failure_txt) != 0))

        debug_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        crash_handler = analyzer_crash_handler.AnalyzerCrashHandler(
            context, debug_env)

        dumps_dir = context.dump_output_dir
        if not os.path.exists(dumps_dir):
            os.mkdir(dumps_dir)

        LOG.info('Generating gdb dump files to : ' + dumps_dir)

        for action in actions:
            LOG.info('Processing action ' + str(action.id) + '.')
            debug_log_file = \
                os.path.join(dumps_dir,
                             get_dump_file_name(last_run.id, action.id))
            # Skip actions already dumped unless --force was given.
            if not force and os.path.exists(debug_log_file):
                LOG.info('This file already exists.')
                continue

            LOG.info('Generating stacktrace with gdb.')
            gdb_result = \
                crash_handler.get_crash_info(str(action.check_cmd).split())

            LOG.info('Writing debug info to file.')
            with open(debug_log_file, 'w') as log_file:
                log_file.write('========================\n')
                log_file.write('Build command hash: \n')
                log_file.write('========================\n')
                log_file.write(action.build_cmd_hash + '\n')
                log_file.write('===============\n')
                log_file.write('Check command: \n')
                log_file.write('===============\n')
                log_file.write(action.check_cmd + '\n')
                log_file.write('==============\n')
                log_file.write('Failure text: \n')
                log_file.write('==============\n')
                log_file.write(action.failure_txt + '\n')
                log_file.write('==========\n')
                log_file.write('GDB info: \n')
                log_file.write('==========\n')
                log_file.write(gdb_result)

        LOG.info('All new debug files are placed in ' + dumps_dir)

    except KeyboardInterrupt as kb_exc:
        LOG.error(str(kb_exc))
        sys.exit(1)
def handle_check(args):
    """
    Check mode.

    Runs the full pipeline: environment checks, build-log generation
    and parsing, database/report-server startup, run registration,
    suppress handling and finally the static analysis itself.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    args.workspace = os.path.realpath(args.workspace)
    if not os.path.isdir(args.workspace):
        os.mkdir(args.workspace)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # load severity map from config file
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    log_file = _check_generate_log_file(args, context)
    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    conn_mgr = client.ConnectionManager(sql_server,
                                        'localhost',
                                        util.get_free_port())

    if args.jobs <= 0:
        args.jobs = 1

    package_version = context.version['major'] + '.' + \
        context.version['minor']
    suppress_file = os.path.join(args.workspace, package_version) \
        if not args.suppress \
        else os.path.realpath(args.suppress)

    send_suppress = False
    if os.path.exists(suppress_file):
        send_suppress = True

    sql_server.start(wait_for_start=True, init=True)
    conn_mgr.start_report_server(context.db_version_info)

    LOG.debug("Checker server started.")

    with client.get_connection() as connection:
        try:
            context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                        args.name,
                                                        package_version,
                                                        args.update)
        except shared.ttypes.RequestFailed as thrift_ex:
            if 'violates unique constraint "uq_runs_name"' not in thrift_ex.message:
                # not the unique name was the problem
                raise
            else:
                LOG.info("Name was already used in the database please choose another unique name for checking.")
                sys.exit(1)

        if args.update:
            # clean previous suppress information
            client.clean_suppress(connection, context.run_id)

        if send_suppress:
            client.send_suppress(connection, suppress_file)

        report_output = os.path.join(context.codechecker_workspace,
                                     context.report_output_dir_name)
        if not os.path.exists(report_output):
            os.mkdir(report_output)

        static_analyzer = analyzer.StaticAnalyzer(context)
        static_analyzer.workspace = report_output

        # first add checkers from config file
        static_analyzer.checkers = context.default_checkers

        # add user defined checkers
        try:
            static_analyzer.checkers = args.ordered_checker_args
        except AttributeError:
            LOG.debug('No checkers were defined in the command line')

        if args.configfile:
            static_analyzer.add_config(connection, args.configfile)
        # else:
        #     add default config from package
        #     static_analyzer.add_config(connection,
        #                                context.checkers_config_file)

        if args.skipfile:
            static_analyzer.add_skip(connection,
                                     os.path.realpath(args.skipfile))

    LOG.info("Static analysis is starting..")
    start_time = time.time()

    LOG.debug("Starting workers...")
    start_workers(static_analyzer, actions, args.jobs, context)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run()

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")

    if not args.keep_tmp:
        LOG.debug("Removing plist files in " +
                  context.codechecker_workspace)
        plist_files = glob.glob(os.path.join(report_output, '*.plist'))
        for pf in plist_files:
            os.remove(pf)

    LOG.info("Analysis has finished.")
def handle_server(args):
    """
    Starts the report viewer server.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    try:
        workspace = args.workspace
    except AttributeError:
        # If no workspace value was set for some reason
        # in args set the default value.
        workspace = util.get_default_workspace()

    # WARNING
    # In case of SQLite args.dbaddress default value is used
    # for which the is_localhost should return true.
    local_db = util.is_localhost(args.dbaddress)
    if local_db and not os.path.exists(workspace):
        os.makedirs(workspace)

    if args.suppress is None:
        LOG.warning(
            "WARNING! No suppress file was given, suppressed results will " +
            'be only stored in the database.')
    else:
        if not os.path.exists(args.suppress):
            LOG.error('Suppress file ' + args.suppress + ' not found!')
            sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = workspace
    session_manager.SessionManager.CodeChecker_Workspace = workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    conn_mgr = client.ConnectionManager(sql_server,
                                        args.check_address,
                                        args.check_port)

    # With a check port the report server is started too, otherwise
    # only the database is brought up for the viewer.
    if args.check_port:
        LOG.debug('Starting CodeChecker server and database server.')
        sql_server.start(context.db_version_info, wait_for_start=True,
                         init=True)
        conn_mgr.start_report_server()
    else:
        LOG.debug('Starting database.')
        sql_server.start(context.db_version_info, wait_for_start=True,
                         init=True)

    # Start database viewer.
    db_connection_string = sql_server.get_connection_string()

    suppress_handler = \
        generic_package_suppress_handler.GenericSuppressHandler()
    try:
        suppress_handler.suppress_file = args.suppress
        LOG.debug('Using suppress file: ' +
                  str(suppress_handler.suppress_file))
    except AttributeError as aerr:
        # Suppress file was not set.
        LOG.debug(aerr)

    package_data = {'www_root': context.www_root,
                    'doc_root': context.doc_root}

    checker_md_docs = os.path.join(context.doc_root, 'checker_md_docs')
    checker_md_docs_map = os.path.join(checker_md_docs,
                                       'checker_doc_map.json')

    package_data['checker_md_docs'] = checker_md_docs
    with open(checker_md_docs_map, 'r') as dFile:
        checker_md_docs_map = json.load(dFile)

    package_data['checker_md_docs_map'] = checker_md_docs_map

    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_check(args):
    """
    Runs the original build and logs the buildactions.
    Based on the log runs the analysis.
    """
    try:
        if not host_check.check_zlib():
            LOG.error("zlib error")
            sys.exit(1)

        try:
            workspace = args.workspace
        except AttributeError:
            # If no workspace value was set for some reason
            # in args set the default value.
            workspace = util.get_default_workspace()

        workspace = os.path.realpath(workspace)
        if not os.path.isdir(workspace):
            os.mkdir(workspace)

        context = generic_package_context.get_context()
        context.codechecker_workspace = workspace
        context.db_username = args.dbusername

        log_file = build_manager.check_log_file(args)
        if not log_file:
            log_file = build_manager.generate_log_file(args,
                                                       context,
                                                       args.quiet_build)
        if not log_file:
            # BUG FIX: log_file is None/'' here; the old code
            # concatenated it into the message (TypeError for None).
            LOG.error("Failed to generate compilation command file.")
            sys.exit(1)

        try:
            actions = log_parser.parse_log(log_file)
        except Exception as ex:
            LOG.error(ex)
            sys.exit(1)

        if not actions:
            LOG.warning('There are no build actions in the log file.')
            sys.exit(1)

        check_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        sql_server = SQLServer.from_cmdline_args(args,
                                                 context.codechecker_workspace,
                                                 context.migration_root,
                                                 check_env)

        conn_mgr = client.ConnectionManager(sql_server,
                                            'localhost',
                                            util.get_free_port())

        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)

        conn_mgr.start_report_server()
        LOG.debug("Checker server started.")

        analyzer.run_check(args, actions, context)
        LOG.info("Analysis has finished.")

        db_data = ""
        if args.postgresql:
            db_data += " --postgresql" \
                + " --dbname " + args.dbname \
                + " --dbport " + str(args.dbport) \
                + " --dbusername " + args.dbusername

        LOG.info("To view results run:\nCodeChecker server -w " +
                 workspace + db_data)

    except Exception as ex:
        LOG.error(ex)
        import traceback
        print(traceback.format_exc())
def check(check_data):
    """
    Invoke clang with an action which called by processes.
    Different analyzer object belongs to for each build action.

    skiplist handler is None if no skip file was configured

    :param check_data: tuple of (args, action, context,
        analyzer_config_map, skp_handler, report_output_dir, use_db).
    :returns: (return_code, skipped, analyzer_type) tuple; return_code
        is 1 when any source of the action failed to analyze.
    """
    args, action, context, analyzer_config_map, skp_handler, \
        report_output_dir, use_db = check_data

    skipped = False
    try:
        # If one analysis fails the check fails.
        return_codes = 0
        skipped = False
        for source in action.sources:

            # If there is no skiplist handler there was no skip list file
            # in the command line.
            # C++ file skipping is handled here.
            _, source_file_name = ntpath.split(source)

            if skp_handler and skp_handler.should_skip(source):
                LOG.debug_analyzer(source_file_name + ' is skipped')
                skipped = True
                continue

            # Construct analyzer env.
            analyzer_environment = analyzer_env.get_check_env(
                context.path_env_extra,
                context.ld_lib_path_extra)
            run_id = context.run_id

            rh = analyzer_types.construct_result_handler(
                args,
                action,
                run_id,
                report_output_dir,
                context.severity_map,
                skp_handler,
                use_db)

            # Create a source analyzer.
            source_analyzer = \
                analyzer_types.construct_analyzer(action,
                                                  analyzer_config_map)

            # Source is the currently analyzed source file
            # there can be more in one buildaction.
            source_analyzer.source_file = source

            # Fills up the result handler with the analyzer information.
            source_analyzer.analyze(rh, analyzer_environment)

            if rh.analyzer_returncode == 0:
                # Analysis was successful processing results.
                if rh.analyzer_stdout != '':
                    LOG.debug_analyzer('\n' + rh.analyzer_stdout)
                if rh.analyzer_stderr != '':
                    LOG.debug_analyzer('\n' + rh.analyzer_stderr)
                rh.postprocess_result()
                rh.handle_results()
                LOG.info("[%d/%d] %s analyzed %s successfully." %
                         (progress_checked_num.value,
                          progress_actions.value,
                          action.analyzer_type,
                          source_file_name))
            else:
                # Analysis failed.
                LOG.error('Analyzing ' + source_file_name + ' with ' +
                          action.analyzer_type + ' failed.')
                if rh.analyzer_stdout != '':
                    LOG.error(rh.analyzer_stdout)
                if rh.analyzer_stderr != '':
                    LOG.error(rh.analyzer_stderr)
                return_codes = rh.analyzer_returncode

            if not args.keep_tmp:
                rh.clean_results()

        # Shared counter across worker processes for progress reporting.
        progress_checked_num.value += 1

        return return_codes, skipped, action.analyzer_type

    except Exception as e:
        LOG.debug_analyzer(str(e))
        traceback.print_exc(file=sys.stdout)
        return 1, skipped, action.analyzer_type
def check(check_data):
    """
    Invoke clang with an action which called by processes.
    Different analyzer object belongs to for each build action.

    skiplist handler is None if no skip file was configured.

    :param check_data: tuple of (args, action, context,
        analyzer_config_map, skp_handler, report_output_dir, use_db).
    :returns: (return_code, skipped, analyzer_type) tuple; return_code
        is 1 when any source of the action failed to analyze.
    """
    args, action, context, analyzer_config_map, skp_handler, \
        report_output_dir, use_db = check_data

    skipped = False
    try:
        # If one analysis fails the check fails.
        return_codes = 0
        skipped = False
        for source in action.sources:

            # If there is no skiplist handler there was no skip list file
            # in the command line.
            # C++ file skipping is handled here.
            _, source_file_name = ntpath.split(source)

            if skp_handler and skp_handler.should_skip(source):
                LOG.debug_analyzer(source_file_name + ' is skipped')
                skipped = True
                continue

            # Construct analyzer env.
            analyzer_environment = analyzer_env.get_check_env(
                context.path_env_extra,
                context.ld_lib_path_extra)
            run_id = context.run_id

            # NOTE: this variant also hands progress_lock (a shared
            # lock from the worker pool) to the result handler.
            rh = analyzer_types.construct_result_handler(
                args,
                action,
                run_id,
                report_output_dir,
                context.severity_map,
                skp_handler,
                progress_lock,
                use_db)

            # Create a source analyzer.
            source_analyzer = \
                analyzer_types.construct_analyzer(action,
                                                  analyzer_config_map)

            # Source is the currently analyzed source file
            # there can be more in one buildaction.
            source_analyzer.source_file = source

            # Fills up the result handler with the analyzer information.
            source_analyzer.analyze(rh, analyzer_environment)

            if rh.analyzer_returncode == 0:
                # Analysis was successful processing results.
                if rh.analyzer_stdout != '':
                    LOG.debug_analyzer('\n' + rh.analyzer_stdout)
                if rh.analyzer_stderr != '':
                    LOG.debug_analyzer('\n' + rh.analyzer_stderr)
                rh.postprocess_result()
                rh.handle_results()
                LOG.info("[%d/%d] %s analyzed %s successfully." %
                         (progress_checked_num.value,
                          progress_actions.value,
                          action.analyzer_type,
                          source_file_name))
            else:
                # Analysis failed.
                LOG.error('Analyzing ' + source_file_name + ' with ' +
                          action.analyzer_type + ' failed.')
                if rh.analyzer_stdout != '':
                    LOG.error(rh.analyzer_stdout)
                if rh.analyzer_stderr != '':
                    LOG.error(rh.analyzer_stderr)
                return_codes = rh.analyzer_returncode

            if not args.keep_tmp:
                rh.clean_results()

        # Shared counter across worker processes for progress reporting.
        progress_checked_num.value += 1

        return return_codes, skipped, action.analyzer_type

    except Exception as e:
        LOG.debug_analyzer(str(e))
        traceback.print_exc(file=sys.stdout)
        return 1, skipped, action.analyzer_type
def handle_check(args):
    """
    Runs the original build and logs the buildactions.
    Based on the log runs the analysis.
    """
    try:
        if not host_check.check_zlib():
            LOG.error("zlib error")
            sys.exit(1)

        try:
            workspace = args.workspace
        except AttributeError:
            # If no workspace value was set for some reason
            # in args set the default value.
            workspace = util.get_default_workspace()

        workspace = os.path.realpath(workspace)
        if not os.path.isdir(workspace):
            os.mkdir(workspace)

        context = generic_package_context.get_context()
        context.codechecker_workspace = workspace
        context.db_username = args.dbusername

        log_file = build_manager.check_log_file(args)
        if not log_file:
            log_file = build_manager.generate_log_file(args,
                                                       context,
                                                       args.quiet_build)
        if not log_file:
            # BUG FIX: log_file is None/'' here; the old code
            # concatenated it into the message (TypeError for None).
            LOG.error("Failed to generate compilation command file.")
            sys.exit(1)

        try:
            actions = log_parser.parse_log(log_file)
        except Exception as ex:
            LOG.error(ex)
            sys.exit(1)

        if not actions:
            LOG.warning('There are no build actions in the log file.')
            sys.exit(1)

        check_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        sql_server = SQLServer.from_cmdline_args(args,
                                                 context.codechecker_workspace,
                                                 context.migration_root,
                                                 check_env)

        conn_mgr = client.ConnectionManager(sql_server,
                                            'localhost',
                                            util.get_free_port())

        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)

        conn_mgr.start_report_server(context.db_version_info)
        LOG.debug("Checker server started.")

        analyzer.run_check(args, actions, context)
        LOG.info("Analysis has finished.")

        db_data = ""
        if args.postgresql:
            db_data += " --postgresql" \
                + " --dbname " + args.dbname \
                + " --dbport " + str(args.dbport) \
                + " --dbusername " + args.dbusername

        LOG.info("To view results run:\nCodeChecker server -w " +
                 workspace + db_data)

    except Exception as ex:
        LOG.error(ex)
        import traceback
        print(traceback.format_exc())
def handle_server(args):
    """Start the report viewer server (and, if needed, the database).

    Depending on args.check_port this either starts both the report
    storage server and the database, or only the database, then launches
    the web-based result viewer.

    args -- parsed command-line namespace; reads .workspace (optional),
            .dbaddress, .suppress, .dbusername, .check_address,
            .check_port, .view_port, .not_host_only.
            Exits the process on fatal errors.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    try:
        workspace = args.workspace
    except AttributeError:
        # If no workspace value was set for some reason
        # in args set the default value.
        workspace = util.get_default_workspace()

    # WARNING
    # In case of SQLite args.dbaddress default value is used
    # for which the is_localhost should return true.
    local_db = util.is_localhost(args.dbaddress)
    if local_db and not os.path.exists(workspace):
        os.makedirs(workspace)

    # A missing suppress file is only a warning; suppressions then live
    # solely in the database. A given-but-nonexistent file is fatal.
    if args.suppress is None:
        LOG.warning(
            "WARNING! No suppress file was given, suppressed results will " +
            'be only stored in the database.')
    else:
        if not os.path.exists(args.suppress):
            LOG.error('Suppress file ' + args.suppress + ' not found!')
            sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = workspace
    session_manager.SessionManager.CodeChecker_Workspace = workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    conn_mgr = client.ConnectionManager(sql_server,
                                        args.check_address,
                                        args.check_port)
    # With a check port the report storage server is started as well;
    # without one only the database is brought up.
    if args.check_port:
        LOG.debug('Starting codechecker server and database server.')
        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)
        conn_mgr.start_report_server(context.db_version_info)
    else:
        LOG.debug('Starting database.')
        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)

    # Start database viewer.
    db_connection_string = sql_server.get_connection_string()

    suppress_handler = \
        generic_package_suppress_handler.GenericSuppressHandler()
    try:
        suppress_handler.suppress_file = args.suppress
        LOG.debug('Using suppress file: ' +
                  str(suppress_handler.suppress_file))
    except AttributeError as aerr:
        # Suppress file was not set.
        LOG.debug(aerr)

    # Static assets and checker documentation served by the viewer.
    package_data = {'www_root': context.www_root,
                    'doc_root': context.doc_root}

    checker_md_docs = os.path.join(context.doc_root, 'checker_md_docs')
    checker_md_docs_map = os.path.join(checker_md_docs,
                                       'checker_doc_map.json')
    package_data['checker_md_docs'] = checker_md_docs
    # checker_md_docs_map is rebound from the path to the loaded mapping.
    with open(checker_md_docs_map, 'r') as dFile:
        checker_md_docs_map = json.load(dFile)
    package_data['checker_md_docs_map'] = checker_md_docs_map

    # Blocks serving the viewer until shutdown.
    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_server(args):
    """Start the report viewer server backed by a PostgreSQL database.

    Configures the class-level client.ConnectionManager state, starts
    postgres (and, if a check port was given, the report storage server
    too), then launches the web-based result viewer.

    args -- parsed command-line namespace; reads .suppress, .workspace,
            .dbusername, .dbaddress, .dbport, .dbname, .check_address,
            .check_port, .view_port, .not_host_only.
            Exits the process on fatal errors.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    check_options_validity(args)
    # A missing suppress file is only a warning; a given-but-missing
    # file is fatal.
    if args.suppress is None:
        LOG.warning('WARNING! No suppress file was given, suppressed '
                    'results will be only stored in the database.')
    else:
        if not os.path.exists(args.suppress):
            LOG.error('Suppress file '+args.suppress+' not found!')
            sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    setup_connection_manager_db(args)

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)
    # NOTE: ConnectionManager is configured through class attributes,
    # so the order of the assignments below matters.
    client.ConnectionManager.run_env = check_env

    if args.check_port:
        LOG.debug('Starting codechecker server and postgres.')
        client.ConnectionManager.host = args.check_address
        client.ConnectionManager.port = args.check_port
        client.ConnectionManager.run_env = check_env
        # starts postgres
        client.ConnectionManager.start_server(args.dbname, context)
    else:
        LOG.debug('Starting postgres.')
        client.ConnectionManager.start_postgres(context, init_db=False)

    # Wait until the database startup process has finished.
    client.ConnectionManager.block_until_db_start_proc_free(context)

    # start database viewer
    db_connection_string = 'postgresql://'+args.dbusername+ \
                           '@'+args.dbaddress+ \
                           ':'+str(args.dbport)+ \
                           '/'+args.dbname

    suppress_handler = \
        generic_package_suppress_handler.GenericSuppressHandler()
    suppress_handler.suppress_file = args.suppress
    LOG.debug('Using suppress file: ' + str(suppress_handler.suppress_file))

    # Static assets and checker documentation served by the viewer.
    package_data = {}
    package_data['www_root'] = context.www_root
    package_data['doc_root'] = context.doc_root
    checker_md_docs = os.path.join(context.doc_root, 'checker_md_docs')
    checker_md_docs_map = os.path.join(checker_md_docs,
                                       'checker_doc_map.json')
    package_data['checker_md_docs'] = checker_md_docs
    # checker_md_docs_map is rebound from the path to the loaded mapping.
    with open(checker_md_docs_map, 'r') as dFile:
        checker_md_docs_map = json.load(dFile)
    package_data['checker_md_docs_map'] = checker_md_docs_map

    # Blocks serving the viewer until shutdown.
    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_check(args):
    """Check mode: log the build (or reuse a log), then run the analysis.

    Validates the environment and the clang binary, optionally wraps the
    given build command to produce a compilation log, parses it into
    build actions, registers a checker run in the database, and runs the
    static analyzer workers over the actions.

    args -- parsed command-line namespace; reads .workspace, .logfile,
            .dbusername, .command, .jobs, .suppress, .name, .update,
            .clean, .ordered_checker_args (optional), .configfile,
            .skipfile, .dbname. Exits the process on fatal errors.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    args.workspace = os.path.realpath(args.workspace)
    if not os.path.isdir(args.workspace):
        os.mkdir(args.workspace)

    log_file = ""
    if args.logfile:
        log_file = os.path.realpath(args.logfile)
        if not os.path.exists(args.logfile):
            LOG.info("Log file does not exists.")
            return

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # load severity map from config file
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    if args.command:
        # check if logger bin exists
        if not os.path.isfile(context.path_logger_bin):
            LOG.debug('Logger binary not found! Required for logging.')
            sys.exit(1)

        # check if logger lib exists
        if not os.path.exists(context.path_logger_lib):
            LOG.debug('Logger library directory not found! '
                      'Libs are requires for logging.')
            sys.exit(1)

        log_file = os.path.join(context.codechecker_workspace, \
                                context.build_log_file_name)
        # Always start from an empty log file for a fresh build.
        if os.path.exists(log_file):
            os.remove(log_file)
        open(log_file, 'a').close()  # same as linux's touch
        perform_build_command(log_file, args.command, context)

    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    setup_connection_manager_db(args)
    client.ConnectionManager.port = util.get_free_port()

    if args.jobs <= 0:
        args.jobs = 1

    package_version = context.version['major'] + '.' \
        + context.version['minor']

    # Without an explicit suppress file a workspace-local default path
    # is used; the file is only uploaded if it actually exists.
    suppress_file = os.path.join(args.workspace, package_version) \
        if not args.suppress \
        else os.path.realpath(args.suppress)

    send_suppress = False
    if os.path.exists(suppress_file):
        send_suppress = True

    client.ConnectionManager.run_env = check_env

    client.ConnectionManager.start_server(args.dbname, context)

    LOG.debug("Checker server started.")

    with client.get_connection() as connection:
        try:
            context.run_id = connection.add_checker_run(' '.join(sys.argv), \
                                                        args.name,
                                                        package_version,
                                                        args.update)
        except shared.ttypes.RequestFailed as thrift_ex:
            if 'violates unique constraint "runs_name_key"' \
                    not in thrift_ex.message:
                # not the unique name was the problem
                raise
            else:
                LOG.info("Name was already used in the database please "
                         "choose another unique name for checking.")
                sys.exit(1)

        if send_suppress:
            client.send_suppress(connection, suppress_file)

        # static_analyzer.clean = args.clean
        if args.clean:
            # cleaning up previous results
            LOG.debug("Cleaning previous plist files in "+ \
                      context.codechecker_workspace)
            plist_files = glob.glob(
                os.path.join(context.codechecker_workspace, '*.plist'))
            for pf in plist_files:
                os.remove(pf)

        report_output = os.path.join(context.codechecker_workspace,
                                     context.report_output_dir_name)
        if not os.path.exists(report_output):
            os.mkdir(report_output)

        static_analyzer = analyzer.StaticAnalyzer(context)
        static_analyzer.workspace = report_output

        # first add checkers from config file
        static_analyzer.checkers = context.default_checkers

        # add user defined checkers
        try:
            static_analyzer.checkers = args.ordered_checker_args
        except AttributeError as aerr:
            LOG.debug('No checkers were defined in the command line')

        if args.configfile:
            static_analyzer.add_config(connection, args.configfile)
        # else:
        # add default config from package
        # static_analyzer.add_config(connection,
        # context.checkers_config_file)

        if args.skipfile:
            static_analyzer.add_skip(connection,
                                     os.path.realpath(args.skipfile))

    LOG.info("Static analysis is starting..")
    start_time = time.time()

    LOG.debug("Starting workers...")
    start_workers(static_analyzer, actions, args.jobs, context)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run()

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")
    LOG.info("Analysis has finished.")
def handle_server(args):
    """Start the report viewer server and its backing database.

    When a check port is given the report storage server is started in
    addition to the database; otherwise only the database comes up.
    Finally the web-based result viewer is launched (blocking).

    args -- parsed command-line namespace; reads .suppress, .workspace,
            .dbusername, .check_address, .check_port, .view_port,
            .not_host_only. Exits the process on fatal errors.
    """
    # Environment sanity checks first.
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    check_options_validity(args)

    # No suppress file is only a warning (suppressions stay DB-only);
    # a given-but-missing file is fatal.
    if args.suppress is None:
        LOG.warning('WARNING! No suppress file was given, suppressed '
                    'results will be only stored in the database.')
    elif not os.path.exists(args.suppress):
        LOG.error('Suppress file %s not found!' % args.suppress)
        sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    environ = analyzer_env.get_check_env(context.path_env_extra,
                                         context.ld_lib_path_extra)

    database = SQLServer.from_cmdline_args(args,
                                           context.codechecker_workspace,
                                           context.migration_root,
                                           environ)
    connection_mgr = client.ConnectionManager(database,
                                              args.check_address,
                                              args.check_port)

    if args.check_port:
        LOG.debug('Starting codechecker server and database server.')
        database.start(wait_for_start=True, init=True)
        connection_mgr.start_report_server(context.db_version_info)
    else:
        LOG.debug('Starting database.')
        database.start(wait_for_start=True, init=True)

    # start database viewer
    connection_string = database.get_connection_string()

    suppress_handler = \
        generic_package_suppress_handler.GenericSuppressHandler()
    suppress_handler.suppress_file = args.suppress
    LOG.debug('Using suppress file: %s' % str(suppress_handler.suppress_file))

    # Collect the static assets and checker documentation the viewer
    # serves.
    docs_dir = os.path.join(context.doc_root, 'checker_md_docs')
    doc_map_path = os.path.join(docs_dir, 'checker_doc_map.json')
    with open(doc_map_path, 'r') as doc_map_file:
        doc_map = json.load(doc_map_file)

    package_data = {'www_root': context.www_root,
                    'doc_root': context.doc_root,
                    'checker_md_docs': docs_dir,
                    'checker_md_docs_map': doc_map}

    # Blocks serving the viewer until shutdown.
    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)