def check_supported_analyzers(analyzers, context): """ Check if the selected analyzers are supported. """ check_env = analyzer_env.get_check_env(context.path_env_extra, context.ld_lib_path_extra) analyzer_binaries = context.analyzer_binaries enabled_analyzers = set() for analyzer_name in analyzers: if analyzer_name not in supported_analyzers: LOG.error('Unsupported analyzer ' + analyzer_name + ' !') sys.exit(1) # Get the compiler binary to check if it can run. available_analyzer = True analyzer_bin = analyzer_binaries.get(analyzer_name) if not analyzer_bin: LOG.debug_analyzer('Failed to detect analyzer binary ' + analyzer_name) available_analyzer = False if not host_check.check_clang(analyzer_bin, check_env): LOG.warning('Failed to run analyzer ' + analyzer_name + ' !') available_analyzer = False if available_analyzer: enabled_analyzers.add(analyzer_name) return enabled_analyzers
def check_supported_analyzers(analyzers, context): """ check if the selected analyzers are supported """ check_env = analyzer_env.get_check_env(context.path_env_extra, context.ld_lib_path_extra) analyzer_binaries = context.analyzer_binaries enabled_analyzers = set() for analyzer_name in analyzers: if analyzer_name not in supported_analyzers: LOG.error('Unsupported analyzer ' + analyzer_name + ' !') sys.exit(1) else: # get the compiler binary to check if it can run available_analyzer = True analyzer_bin = analyzer_binaries.get(analyzer_name) if not analyzer_bin: LOG.debug_analyzer('Failed to detect analyzer binary ' + analyzer_name) available_analyzer = False if not host_check.check_clang(analyzer_bin, check_env): LOG.warning('Failed to run analyzer ' + analyzer_name + ' !') available_analyzer = False if available_analyzer: enabled_analyzers.add(analyzer_name) return enabled_analyzers
def _do_quickcheck(args):
    '''
    Handles the "quickcheck" command.

    For arguments see the main function in CodeChecker.py.
    It also requires an extra property in the args object, namely 'workspace',
    which is a directory path given as a string.

    This function is called from handle_quickcheck.
    '''

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # Load the severity map from the config file.
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    log_file = _check_generate_log_file(args, context, silent=True)
    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    static_analyzer = analyzer.StaticAnalyzer(context)
    static_analyzer.workspace = args.workspace
    static_analyzer.checkers = context.default_checkers

    # Add user-defined checkers.
    try:
        static_analyzer.checkers = args.ordered_checker_args
    except AttributeError:
        LOG.debug('No checkers were defined in the command line')

    for action in actions:
        analyzer.run_quick_check(static_analyzer,
                                 action,
                                 print_steps=args.print_steps)
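# _do_quickcheck is documented above as being called from handle_quickcheck,
# which is not part of this excerpt. Below is a minimal sketch of such a
# wrapper, assuming its only extra duty is to provide (and clean up) a
# temporary workspace directory; the real implementation may differ.
def handle_quickcheck(args):
    """ Run quickcheck in a throw-away workspace that is removed afterwards. """
    import shutil
    import tempfile

    args.workspace = tempfile.mkdtemp(prefix='codechecker-qc-')
    try:
        _do_quickcheck(args)
    finally:
        shutil.rmtree(args.workspace, ignore_errors=True)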
def handle_check(args): """ Check mode. """ if not host_check.check_zlib(): LOG.error("zlib error") sys.exit(1) args.workspace = os.path.realpath(args.workspace) if not os.path.isdir(args.workspace): os.mkdir(args.workspace) log_file = "" if args.logfile: log_file = os.path.realpath(args.logfile) if not os.path.exists(args.logfile): LOG.info("Log file does not exists.") return context = generic_package_context.get_context() context.codechecker_workspace = args.workspace context.db_username = args.dbusername check_env = analyzer_env.get_check_env(context.path_env_extra, context.ld_lib_path_extra) compiler_bin = context.compiler_bin if not host_check.check_clang(compiler_bin, check_env): sys.exit(1) #load severity map from config file if os.path.exists(context.checkers_severity_map_file): with open(context.checkers_severity_map_file, 'r') as sev_conf_file: severity_config = sev_conf_file.read() context.severity_map = json.loads(severity_config) if args.command: # check if logger bin exists if not os.path.isfile(context.path_logger_bin): LOG.debug('Logger binary not found! Required for logging.') sys.exit(1) # check if logger lib exists if not os.path.exists(context.path_logger_lib): LOG.debug('Logger library directory not found! Libs are requires for logging.') sys.exit(1) log_file = os.path.join(context.codechecker_workspace, \ context.build_log_file_name) if os.path.exists(log_file): os.remove(log_file) open(log_file, 'a').close() # same as linux's touch perform_build_command(log_file, args.command, context) try: actions = log_parser.parse_log(log_file) except Exception as ex: LOG.error(ex) sys.exit(1) if not actions: LOG.warning('There are no build actions in the log file.') sys.exit(1) setup_connection_manager_db(args) client.ConnectionManager.port = util.get_free_port() if args.jobs <= 0: args.jobs = 1 package_version = context.version['major'] + '.' 
+ context.version['minor'] suppress_file = os.path.join(args.workspace, package_version) \ if not args.suppress \ else os.path.realpath(args.suppress) send_suppress = False if os.path.exists(suppress_file): send_suppress = True client.ConnectionManager.run_env = check_env client.ConnectionManager.start_server(args.dbname, context) LOG.debug("Checker server started.") with client.get_connection() as connection: try: context.run_id = connection.add_checker_run(' '.join(sys.argv), \ args.name, package_version, args.update) except shared.ttypes.RequestFailed as thrift_ex: if 'violates unique constraint "runs_name_key"' not in thrift_ex.message: # not the unique name was the problem raise else: LOG.info("Name was already used in the database please choose another unique name for checking.") sys.exit(1) if send_suppress: client.send_suppress(connection, suppress_file) #static_analyzer.clean = args.clean if args.clean: #cleaning up previous results LOG.debug("Cleaning previous plist files in "+ \ context.codechecker_workspace) plist_files = glob.glob(os.path.join(context.codechecker_workspace,'*.plist')) for pf in plist_files: os.remove(pf) report_output = os.path.join(context.codechecker_workspace, context.report_output_dir_name) if not os.path.exists(report_output): os.mkdir(report_output) static_analyzer = analyzer.StaticAnalyzer(context) static_analyzer.workspace = report_output # first add checkers from config file static_analyzer.checkers = context.default_checkers # add user defined checkers try: static_analyzer.checkers = args.ordered_checker_args except AttributeError as aerr: LOG.debug('No checkers were defined in the command line') if args.configfile: static_analyzer.add_config(connection, args.configfile) # else: # add default config from package # static_analyzer.add_config(connection, context.checkers_config_file) if args.skipfile: static_analyzer.add_skip(connection, os.path.realpath(args.skipfile)) LOG.info("Static analysis is starting..") start_time = time.time() LOG.debug("Starting workers...") start_workers(static_analyzer, actions, args.jobs, context) end_time = time.time() with client.get_connection() as connection: connection.finish_checker_run() LOG.info("Analysis length: " + str(end_time - start_time) + " sec.") LOG.info("Analysis has finished.")
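# perform_build_command (called in the handle_check variant above) is not
# defined in this excerpt. The sketch below only illustrates the general idea,
# assuming a compilation logger library is LD_PRELOAD-ed into the user's build
# so that every compiler invocation is recorded into log_file. The library file
# name and the CC_LOGGER_* environment variables are assumptions, not confirmed
# by this excerpt, hence the clearly hypothetical function name.
def _perform_build_command_sketch(log_file, command, context):
    """ Run the user's build command with the compilation logger injected. """
    import subprocess

    env = os.environ.copy()
    # Hypothetical: inject the logger shared object and tell it where to
    # write the build log that log_parser.parse_log() will later consume.
    env['LD_PRELOAD'] = os.path.join(context.path_logger_lib, 'ldlogger.so')
    env['CC_LOGGER_FILE'] = log_file

    ret = subprocess.call(command, shell=True, env=env)
    if ret != 0:
        LOG.error('Build failed with exit code ' + str(ret))
        sys.exit(ret)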
def handle_check(args): """ Check mode. """ if not host_check.check_zlib(): LOG.error("zlib error") sys.exit(1) args.workspace = os.path.realpath(args.workspace) if not os.path.isdir(args.workspace): os.mkdir(args.workspace) context = generic_package_context.get_context() context.codechecker_workspace = args.workspace context.db_username = args.dbusername check_env = analyzer_env.get_check_env(context.path_env_extra, context.ld_lib_path_extra) compiler_bin = context.compiler_bin if not host_check.check_clang(compiler_bin, check_env): sys.exit(1) # load severity map from config file if os.path.exists(context.checkers_severity_map_file): with open(context.checkers_severity_map_file, 'r') as sev_conf_file: severity_config = sev_conf_file.read() context.severity_map = json.loads(severity_config) log_file = _check_generate_log_file(args, context) try: actions = log_parser.parse_log(log_file) except Exception as ex: LOG.error(ex) sys.exit(1) if not actions: LOG.warning('There are no build actions in the log file.') sys.exit(1) sql_server = SQLServer.from_cmdline_args(args, context.codechecker_workspace, context.migration_root, check_env) conn_mgr = client.ConnectionManager(sql_server, 'localhost', util.get_free_port()) if args.jobs <= 0: args.jobs = 1 package_version = context.version['major'] + '.' + context.version['minor'] suppress_file = os.path.join(args.workspace, package_version) \ if not args.suppress \ else os.path.realpath(args.suppress) send_suppress = False if os.path.exists(suppress_file): send_suppress = True sql_server.start(wait_for_start=True, init=True) conn_mgr.start_report_server(context.db_version_info) LOG.debug("Checker server started.") with client.get_connection() as connection: try: context.run_id = connection.add_checker_run(' '.join(sys.argv), args.name, package_version, args.update) except shared.ttypes.RequestFailed as thrift_ex: if 'violates unique constraint "uq_runs_name"' not in thrift_ex.message: # not the unique name was the problem raise else: LOG.info("Name was already used in the database please choose another unique name for checking.") sys.exit(1) if args.update: # clean previous suppress information client.clean_suppress(connection, context.run_id) if send_suppress: client.send_suppress(connection, suppress_file) report_output = os.path.join(context.codechecker_workspace, context.report_output_dir_name) if not os.path.exists(report_output): os.mkdir(report_output) static_analyzer = analyzer.StaticAnalyzer(context) static_analyzer.workspace = report_output # first add checkers from config file static_analyzer.checkers = context.default_checkers # add user defined checkers try: static_analyzer.checkers = args.ordered_checker_args except AttributeError: LOG.debug('No checkers were defined in the command line') if args.configfile: static_analyzer.add_config(connection, args.configfile) # else: # add default config from package # static_analyzer.add_config(connection, context.checkers_config_file) if args.skipfile: static_analyzer.add_skip(connection, os.path.realpath(args.skipfile)) LOG.info("Static analysis is starting..") start_time = time.time() LOG.debug("Starting workers...") start_workers(static_analyzer, actions, args.jobs, context) end_time = time.time() with client.get_connection() as connection: connection.finish_checker_run() LOG.info("Analysis length: " + str(end_time - start_time) + " sec.") if not args.keep_tmp: LOG.debug("Removing plist files in " + context.codechecker_workspace) plist_files = glob.glob(os.path.join(report_output, '*.plist')) 
for pf in plist_files: os.remove(pf) LOG.info("Analysis has finished.")
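# _check_generate_log_file is called by _do_quickcheck and by the second
# handle_check variant, but it is not defined in this excerpt. The sketch below
# is reconstructed from the inlined log-file handling of the first handle_check
# variant earlier in this module; the real helper's argument handling and error
# reporting may differ, hence the clearly hypothetical function name.
def _check_generate_log_file_sketch(args, context, silent=False):
    """ Return the build log to analyze: either the user-supplied log file or
    a fresh one produced by logging the user's build command. """
    if args.logfile:
        log_file = os.path.realpath(args.logfile)
        if not os.path.exists(log_file):
            LOG.info("Log file does not exist.")
            sys.exit(1)
        return log_file

    if args.command:
        log_file = os.path.join(context.codechecker_workspace,
                                context.build_log_file_name)
        if os.path.exists(log_file):
            os.remove(log_file)
        open(log_file, 'a').close()  # Same as the Linux 'touch' command.
        perform_build_command(log_file, args.command, context)
        return log_file

    if not silent:
        LOG.error('Neither a build log nor a build command was given.')
    sys.exit(1)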