def handle_debug(args):
    """
    Run the debug command on the build actions where the analysis
    failed for some reason.
    """
    context = generic_package_context.get_context()

    # Fall back to the default workspace when the argument parser
    # did not provide one.
    try:
        workspace = args.workspace
    except AttributeError:
        workspace = util.get_default_workspace()

    context.codechecker_workspace = workspace
    context.db_username = args.dbusername

    environ = analyzer_env.get_check_env(context.path_env_extra,
                                         context.ld_lib_path_extra)

    server = SQLServer.from_cmdline_args(args,
                                         context.codechecker_workspace,
                                         context.migration_root,
                                         environ)
    server.start(context.db_version_info, wait_for_start=True, init=False)

    debug_reporter.debug(context, server.get_connection_string(), args.force)
def handle_debug(args):
    """
    Re-run (debug) the build actions for which the analysis failed
    for some reason.
    """
    pkg_context = generic_package_context.get_context()

    try:
        ws = args.workspace
    except AttributeError:
        # No workspace was supplied in args; use the default one.
        ws = util.get_default_workspace()

    pkg_context.codechecker_workspace = ws
    pkg_context.db_username = args.dbusername

    run_env = analyzer_env.get_check_env(pkg_context.path_env_extra,
                                         pkg_context.ld_lib_path_extra)

    database = SQLServer.from_cmdline_args(args,
                                           pkg_context.codechecker_workspace,
                                           pkg_context.migration_root,
                                           run_env)
    database.start(pkg_context.db_version_info,
                   wait_for_start=True,
                   init=False)

    debug_reporter.debug(pkg_context,
                         database.get_connection_string(),
                         args.force)
def _do_quickcheck(args):
    """
    Handle the "quickcheck" command.

    For arguments see the main function in CodeChecker.py. It also requires
    an extra property in the args object, namely 'workspace', which is a
    directory path as a string. This function is called from
    handle_quickcheck.
    """
    # Bug fix: initialize before the try block so the finally clause can
    # always reference it — previously a NameError was raised in finally
    # when an exception occurred before log_file was assigned.
    log_file = None
    try:
        context = generic_package_context.get_context()
        context.codechecker_workspace = args.workspace
        args.name = "quickcheck"

        # Load severity map from config file.
        if os.path.exists(context.checkers_severity_map_file):
            with open(context.checkers_severity_map_file, 'r') as sev_file:
                severity_config = sev_file.read()
                context.severity_map = json.loads(severity_config)

        log_file = build_manager.check_log_file(args, context)
        actions = log_parser.parse_log(log_file,
                                       args.add_compiler_defaults)
        analyzer.run_quick_check(args, context, actions)
    except Exception as ex:
        # Log the exception itself too; previously 'ex' was captured but
        # never reported, hiding the failure cause.
        LOG.error("Running quickcheck failed.")
        LOG.error(ex)
    finally:
        if not args.keep_tmp:
            if log_file:
                LOG.debug('Removing temporary log file: ' + log_file)
                os.remove(log_file)
def handle_check(args):
    """
    Run the original build, log the build actions and run the analysis
    based on the collected compilation log.
    """
    # Bug fix: initialize before the try block so the finally clause never
    # references an unbound name when an early exception is raised.
    log_file = None
    try:
        if not host_check.check_zlib():
            sys.exit(1)

        args.workspace = os.path.abspath(args.workspace)
        if not os.path.isdir(args.workspace):
            os.mkdir(args.workspace)

        context = generic_package_context.get_context()
        context.codechecker_workspace = args.workspace
        context.db_username = args.dbusername

        log_file = build_manager.check_log_file(args, context)

        if not log_file:
            # Bug fix: log_file is falsy (possibly None) on this path, so
            # use str() to avoid a TypeError while building the message.
            LOG.error("Failed to generate compilation command file: " +
                      str(log_file))
            sys.exit(1)

        actions = log_parser.parse_log(log_file,
                                       args.add_compiler_defaults)

        check_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        sql_server = SQLServer.from_cmdline_args(args,
                                                 context.codechecker_workspace,
                                                 context.migration_root,
                                                 check_env)

        conn_mgr = client.ConnectionManager(sql_server,
                                            'localhost',
                                            util.get_free_port())

        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)
        conn_mgr.start_report_server()
        LOG.debug("Checker server started.")

        analyzer.run_check(args, actions, context)
        LOG.info("Analysis has finished.")
        log_startserver_hint(args)
    except Exception as ex:
        LOG.error(ex)
        import traceback
        print(traceback.format_exc())
    finally:
        if not args.keep_tmp:
            if log_file:
                LOG.debug('Removing temporary log file: ' + log_file)
                os.remove(log_file)
def handle_check(args):
    """
    Run the original build, log the build actions and run the analysis
    based on the collected compilation log.
    """
    # Bug fix: pre-bind so the finally clause cannot hit a NameError when
    # an exception occurs before the log file is created.
    log_file = None
    try:
        if not host_check.check_zlib():
            sys.exit(1)

        args.workspace = os.path.abspath(args.workspace)
        if not os.path.isdir(args.workspace):
            os.mkdir(args.workspace)

        context = generic_package_context.get_context()
        context.codechecker_workspace = args.workspace
        context.db_username = args.dbusername

        log_file = build_manager.check_log_file(args, context)

        if not log_file:
            # Bug fix: str() prevents a TypeError when the falsy log_file
            # is None and gets concatenated into the message.
            LOG.error("Failed to generate compilation command file: " +
                      str(log_file))
            sys.exit(1)

        actions = log_parser.parse_log(log_file,
                                       args.add_compiler_defaults)

        check_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        sql_server = SQLServer.from_cmdline_args(args,
                                                 context.codechecker_workspace,
                                                 context.migration_root,
                                                 check_env)

        conn_mgr = client.ConnectionManager(sql_server,
                                            'localhost',
                                            util.get_free_port())

        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)
        conn_mgr.start_report_server()
        LOG.debug("Checker server started.")

        analyzer.run_check(args, actions, context)
        LOG.info("Analysis has finished.")
        log_startserver_hint(args)
    except Exception as ex:
        LOG.error(ex)
        import traceback
        print(traceback.format_exc())
    finally:
        if not args.keep_tmp:
            if log_file:
                LOG.debug('Removing temporary log file: ' + log_file)
                os.remove(log_file)
def _do_quickcheck(args):
    """
    Handle the "quickcheck" command.

    For arguments see the main function in CodeChecker.py. It also requires
    an extra property in the args object, namely 'workspace', which is a
    directory path as a string. This function is called from
    handle_quickcheck.
    """
    # Bug fix: bind log_file before the try block; otherwise the finally
    # clause raises NameError when an exception fires before assignment.
    log_file = None
    try:
        context = generic_package_context.get_context()
        context.codechecker_workspace = args.workspace
        args.name = "quickcheck"

        # Load severity map from config file.
        if os.path.exists(context.checkers_severity_map_file):
            with open(context.checkers_severity_map_file, 'r') as sev_file:
                severity_config = sev_file.read()
                context.severity_map = json.loads(severity_config)

        log_file = build_manager.check_log_file(args, context)
        actions = log_parser.parse_log(log_file,
                                       args.add_compiler_defaults)
        analyzer.run_quick_check(args, context, actions)
    except Exception as ex:
        # Report the exception details instead of silently dropping them.
        LOG.error("Running quickcheck failed.")
        LOG.error(ex)
    finally:
        if not args.keep_tmp:
            if log_file:
                LOG.debug('Removing temporary log file: ' + log_file)
                os.remove(log_file)
def _do_quickcheck(args):
    """
    Handle the "quickcheck" command.

    For arguments see the main function in CodeChecker.py. It also requires
    an extra property in the args object, namely 'workspace', which is a
    directory path as a string. This function is called from
    handle_quickcheck.
    """
    # Bug fix: pre-bind both names so the finally clause never references
    # unbound locals when an exception is raised before check_log_file
    # returns.
    log_file = None
    set_in_cmdline = False
    try:
        context = generic_package_context.get_context()
        context.codechecker_workspace = args.workspace
        args.name = "quickcheck"

        log_file, set_in_cmdline = build_manager.check_log_file(args, context)
        actions = log_parser.parse_log(log_file,
                                       args.add_compiler_defaults)
        analyzer.run_quick_check(args, context, actions)
    except Exception as ex:
        # Also log the exception; it was previously captured but unused.
        LOG.error("Running quickcheck failed.")
        LOG.error(ex)
    finally:
        # Only delete the log file when it was generated by us, not when
        # it was supplied on the command line.
        if not args.keep_tmp:
            if log_file and not set_in_cmdline:
                LOG.debug('Removing temporary log file: ' + log_file)
                os.remove(log_file)
def handle_log(args):
    """
    Log mode: capture the build commands into a fresh log file.
    """
    logfile = os.path.realpath(args.logfile)
    args.logfile = logfile

    # Start from a clean, empty log file.
    if os.path.exists(logfile):
        os.remove(logfile)

    context = generic_package_context.get_context()

    # Create the (empty) file up front, like 'touch' on Linux.
    open(logfile, 'a').close()

    perform_build_command(logfile, args.command, context)
def handle_log(args):
    """
    Generate a build log by running the original build command.
    No analysis is done.
    """
    resolved = os.path.realpath(args.logfile)
    args.logfile = resolved

    # Any previous log is discarded before logging the new build.
    if os.path.exists(resolved):
        os.remove(resolved)

    context = generic_package_context.get_context()
    build_manager.perform_build_command(resolved, args.command, context)
def handle_log(args):
    """
    Generate a build log by running the original build command;
    no analysis is performed.
    """
    path = os.path.realpath(args.logfile)
    args.logfile = path

    if os.path.exists(path):
        os.remove(path)

    ctx = generic_package_context.get_context()

    # Ensure the log file exists even if the build logs nothing
    # (equivalent of the Unix 'touch' command).
    open(path, 'a').close()

    build_manager.perform_build_command(path, args.command, ctx)
def handle_log(args):
    """
    Run the original build command and record the compilation actions
    into a log file; analysis is not performed here.
    """
    target = os.path.realpath(args.logfile)
    args.logfile = target

    # Remove any stale log from a previous run.
    if os.path.exists(target):
        os.remove(target)

    pkg_context = generic_package_context.get_context()
    build_manager.perform_build_command(target, args.command, pkg_context)
def handle_list_checkers(args):
    """
    List the checkers supported by the analyzers, marking which ones
    are enabled (+) or disabled (-) by the default configuration.
    """
    context = generic_package_context.get_context()
    analyzer_environment = analyzer_env.get_check_env(
        context.path_env_extra,
        context.ld_lib_path_extra)

    requested = args.analyzers
    if not requested:
        # Nothing was selected: list checkers for all supported analyzers.
        requested = list(analyzer_types.supported_analyzers)

    selected_types = set()
    for name in requested:
        if name not in analyzer_types.supported_analyzers:
            LOG.info('Not supported analyzer ' + str(name))
            sys.exit(1)
        selected_types.add(name)

    config_map = analyzer_types.build_config_handlers(args,
                                                      context,
                                                      selected_types)

    for name in requested:
        # Get the config for this analyzer and query its checkers.
        config_handler = config_map.get(name)
        source_analyzer = analyzer_types.construct_analyzer_type(
            name, config_handler, None)

        checkers = source_analyzer.get_analyzer_checkers(
            config_handler, analyzer_environment)

        default_cfg = context.default_checkers_config.get(name + '_checkers')
        analyzer_types.initialize_checkers(config_handler,
                                           checkers,
                                           default_cfg)

        for checker_name, (enabled, description) in \
                config_handler.checks().items():
            marker = '+' if enabled else '-'
            print(' {0} {1:50} {2}'.format(marker, checker_name, description))
def _do_quickcheck(args):
    '''
    Handles the "quickcheck" command.
    For arguments see main function in CodeChecker.py.
    It also requires an extra property in args object, namely workspace
    which is a directory path as a string.
    This function is called from handle_quickcheck.
    '''
    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace

    # Environment (PATH, LD_LIBRARY_PATH, ...) required to run the tools.
    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # Load severity map from config file.
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    # Generate (or reuse) the compilation log without extra output.
    log_file = _check_generate_log_file(args, context, silent=True)
    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    static_analyzer = analyzer.StaticAnalyzer(context)
    static_analyzer.workspace = args.workspace

    # First take the checkers from the default configuration...
    static_analyzer.checkers = context.default_checkers

    # ...then add user defined checkers from the command line, if any.
    try:
        static_analyzer.checkers = args.ordered_checker_args
    except AttributeError:
        LOG.debug('No checkers were defined in the command line')

    for action in actions:
        analyzer.run_quick_check(static_analyzer,
                                 action,
                                 print_steps=args.print_steps)
def _do_quickcheck(args):
    """
    Handle the "quickcheck" command.

    For arguments see the main function in CodeChecker.py. It also requires
    an extra property in the args object, namely 'workspace', which is a
    directory path as a string. This function is called from
    handle_quickcheck.
    """
    context = generic_package_context.get_context()

    try:
        workspace = args.workspace
    except AttributeError:
        # If no workspace value was set for some reason
        # in args set the default value.
        workspace = util.get_default_workspace()
    context.codechecker_workspace = workspace

    # Quickcheck runs single-threaded.
    args.jobs = 1
    args.name = "quickcheck"

    # Load severity map from config file.
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    log_file = build_manager.check_log_file(args)
    if not log_file:
        log_file = build_manager.generate_log_file(args,
                                                   context,
                                                   args.quiet_build)
    if not log_file:
        # Bug fix: log_file is falsy (None) here, so concatenating it into
        # the message raised a TypeError; report a plain message instead.
        LOG.error("Failed to generate compilation command file.")
        sys.exit(1)

    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    analyzer.run_quick_check(args, context, actions)
def handle_log(args):
    """
    Run the build command and log the compilation actions into
    args.logfile; nothing is analyzed here.
    """
    log_path = os.path.realpath(args.logfile)
    args.logfile = log_path

    if os.path.exists(log_path):
        os.remove(log_path)

    context = generic_package_context.get_context()

    # Create the empty file first — same effect as Linux 'touch'.
    open(log_path, 'a').close()

    build_manager.perform_build_command(log_path, args.command, context)
def _do_quickcheck(args):
    """
    Handle the "quickcheck" command.

    For arguments see the main function in CodeChecker.py. It also requires
    an extra property in the args object, namely 'workspace', which is a
    directory path as a string. This function is called from
    handle_quickcheck.
    """
    context = generic_package_context.get_context()

    try:
        workspace = args.workspace
    except AttributeError:
        # If no workspace value was set for some reason
        # in args set the default value.
        workspace = util.get_default_workspace()
    context.codechecker_workspace = workspace

    args.name = "quickcheck"

    # Load severity map from config file.
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    log_file = build_manager.check_log_file(args)
    if not log_file:
        log_file = build_manager.generate_log_file(args,
                                                   context,
                                                   args.quiet_build)
    if not log_file:
        # Bug fix: log_file is falsy (None) on this path; concatenating it
        # into the message raised a TypeError before the error could be
        # reported. Use a plain message.
        LOG.error("Failed to generate compilation command file.")
        sys.exit(1)

    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    analyzer.run_quick_check(args, context, actions)
def handle_list_checkers(args):
    """
    Print every checker known to the selected analyzers together with
    its default enabled (+) or disabled (-) state.
    """
    context = generic_package_context.get_context()
    env = analyzer_env.get_check_env(context.path_env_extra,
                                     context.ld_lib_path_extra)

    analyzers = args.analyzers
    if not analyzers:
        # No explicit selection: cover every supported analyzer.
        analyzers = list(analyzer_types.supported_analyzers)

    chosen = set()
    for analyzer_name in analyzers:
        if analyzer_name not in analyzer_types.supported_analyzers:
            LOG.info('Not supported analyzer ' + str(analyzer_name))
            sys.exit(1)
        chosen.add(analyzer_name)

    config_map = analyzer_types.build_config_handlers(args, context, chosen)

    for analyzer_name in analyzers:
        # Fetch the config handler and instantiate the analyzer.
        handler = config_map.get(analyzer_name)
        source_analyzer = analyzer_types.construct_analyzer_type(
            analyzer_name, handler, None)

        checkers = source_analyzer.get_analyzer_checkers(handler, env)

        default_cfg = context.default_checkers_config.get(
            analyzer_name + '_checkers')
        analyzer_types.initialize_checkers(handler, checkers, default_cfg)

        for checker_name, (enabled, description) in handler.checks().items():
            if enabled:
                print(' + {0:50} {1}'.format(checker_name, description))
            else:
                print(' - {0:50} {1}'.format(checker_name, description))
def handle_debug(args):
    """
    Start the database and run the debug reporter against it.
    """
    ctx = generic_package_context.get_context()
    ctx.codechecker_workspace = args.workspace
    ctx.db_username = args.dbusername

    env = analyzer_env.get_check_env(ctx.path_env_extra,
                                     ctx.ld_lib_path_extra)

    server = SQLServer.from_cmdline_args(args,
                                         ctx.codechecker_workspace,
                                         ctx.migration_root,
                                         env)
    server.start(wait_for_start=True, init=False)

    debug_reporter.debug(ctx, server.get_connection_string(), args.force)
def handle_plist(args):
    """
    Import the reports found in the plist files of a directory and
    store them through the report server.
    """
    ctx = generic_package_context.get_context()
    ctx.codechecker_workspace = args.workspace
    ctx.db_username = args.dbusername

    if not args.stdout:
        args.workspace = os.path.realpath(args.workspace)
        if not os.path.isdir(args.workspace):
            os.mkdir(args.workspace)

    run_env = analyzer_env.get_check_env(ctx.path_env_extra,
                                         ctx.ld_lib_path_extra)

    database = SQLServer.from_cmdline_args(args,
                                           ctx.codechecker_workspace,
                                           ctx.migration_root,
                                           run_env)
    connection_manager = client.ConnectionManager(database,
                                                  'localhost',
                                                  util.get_free_port())

    database.start(ctx.db_version_info, wait_for_start=True, init=True)
    connection_manager.start_report_server()

    with client.get_connection() as connection:
        ctx.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                args.name,
                                                ctx.version,
                                                args.force)

    pool = multiprocessing.Pool(args.jobs)
    try:
        work_items = [(plist, args, ctx)
                      for plist in os.listdir(args.directory)]
        pool.map_async(consume_plist, work_items, 1).get(float('inf'))
        pool.close()
    except Exception:
        # On any failure stop the workers immediately and re-raise.
        pool.terminate()
        raise
    finally:
        pool.join()

    if not args.stdout:
        log_startserver_hint(args)
def handle_plist(args):
    """
    Store reports parsed from plist files into the database, feeding
    the files to a worker pool.
    """
    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    if not args.stdout:
        args.workspace = os.path.realpath(args.workspace)
        if not os.path.isdir(args.workspace):
            os.mkdir(args.workspace)

    environ = analyzer_env.get_check_env(context.path_env_extra,
                                         context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             environ)
    conn_mgr = client.ConnectionManager(sql_server,
                                        'localhost',
                                        util.get_free_port())

    sql_server.start(context.db_version_info, wait_for_start=True, init=True)
    conn_mgr.start_report_server()

    with client.get_connection() as connection:
        context.run_id = connection.add_checker_run(
            ' '.join(sys.argv), args.name, context.version, args.force)

    worker_pool = multiprocessing.Pool(args.jobs)
    try:
        tasks = []
        for plist in os.listdir(args.directory):
            tasks.append((plist, args, context))
        worker_pool.map_async(consume_plist, tasks, 1).get(float('inf'))
        worker_pool.close()
    except Exception:
        worker_pool.terminate()
        raise
    finally:
        worker_pool.join()

    if not args.stdout:
        log_startserver_hint(args)
def handle_debug(args):
    """
    Start PostgreSQL through the connection manager and run the debug
    reporter against the configured database.
    """
    setup_connection_manager_db(args)

    ctx = generic_package_context.get_context()
    ctx.codechecker_workspace = args.workspace
    ctx.db_username = args.dbusername

    environ = analyzer_env.get_check_env(ctx.path_env_extra,
                                         ctx.ld_lib_path_extra)

    client.ConnectionManager.run_env = environ
    client.ConnectionManager.start_postgres(ctx)
    # Wait until the database startup process has finished.
    client.ConnectionManager.block_until_db_start_proc_free(ctx)

    debug_reporter.debug(ctx,
                         args.dbusername,
                         args.dbaddress,
                         args.dbport,
                         args.dbname,
                         args.force)
def handle_version_info(args):
    """
    Get and print the version information from the version config
    file and the Thrift API versions.
    """
    context = generic_package_context.get_context()

    # Bug fix: expandtabs() must be applied to the string BEFORE printing.
    # The old 'print(...).expandtabs(30)' form only parsed as intended with
    # the Python 2 print statement; with the print() function it raises
    # AttributeError on print's None return value.
    print(('Base package version: \t' + context.version).expandtabs(30))
    print(('Package build date: \t' +
           context.package_build_date).expandtabs(30))
    print(('Git hash: \t' + context.package_git_hash).expandtabs(30))
    print(('DB schema version: \t' +
           str(context.db_version_info)).expandtabs(30))

    # Thift api version for the clients.
    from codeCheckerDBAccess import constants
    print(('Thrift client api version: \t' +
           constants.API_VERSION).expandtabs(30))
def handle_version_info(args):
    """
    Get and print the version information from the version config
    file and the Thrift API versions.
    """
    context = generic_package_context.get_context()

    # Bug fix: apply expandtabs() inside the print() call. The previous
    # 'print(...).expandtabs(30)' relied on Python 2 print-statement
    # parsing and fails (AttributeError on None) with the print() function.
    print(('Base package version: \t' + context.version).expandtabs(30))
    print(('Package build date: \t' +
           context.package_build_date).expandtabs(30))
    print(('Git hash: \t' + context.package_git_hash).expandtabs(30))
    print(('DB schema version: \t' +
           str(context.db_version_info)).expandtabs(30))

    # Thift api version for the clients.
    from codeCheckerDBAccess import constants
    print(('Thrift client api version: \t' +
           constants.API_VERSION).expandtabs(30))
def handle_list_checkers(args):
    """
    Print the available checkers, then list which ones are enabled or
    disabled by the default configuration.
    """
    context = generic_package_context.get_context()
    static_analyzer = analyzer.StaticAnalyzer(context)
    LOG.info(static_analyzer.get_checker_list())

    # Print default ENABLED checkers.
    LOG.info("CHECKERS ENABLED BY DEFAULT:")
    for checker_name, _ in [c for c in context.default_checkers if c[1]]:
        print(' ' + checker_name)
    print('')

    # Print default DISABLED checkers.
    LOG.info("CHECKERS DISABLED BY DEFAULT:")
    for checker_name, _ in [c for c in context.default_checkers if not c[1]]:
        print(' ' + checker_name)
    print('')
def handle_version_info(args):
    """
    Get and print the version information from the version config
    file and the Thrift API versions.
    """
    context = generic_package_context.get_context()
    version_file = context.version_file

    try:
        with open(version_file) as v_file:
            v_data = v_file.read()

        version_data = json.loads(v_data)

        base_version = version_data['version']['major'] + \
            '.' + version_data['version']['minor']
        db_schema_version = version_data['db_version']['major'] + \
            '.' + version_data['db_version']['minor']

        # Bug fix: expandtabs() is applied to the string before printing.
        # The old 'print(...).expandtabs(30)' only worked as a Python 2
        # print statement and raises AttributeError (on None) with the
        # print() function.
        print(('Base package version: \t' + base_version).expandtabs(30))
        print(('Package build date: \t' +
               version_data['package_build_date']).expandtabs(30))
        print(('Git hash: \t' + version_data['git_hash']).expandtabs(30))
        print(('DB schema version: \t' + db_schema_version).expandtabs(30))

    except ValueError as verr:
        LOG.error('Failed to decode version information from the config file.')
        LOG.error(verr)
        sys.exit(1)
    except IOError as ioerr:
        LOG.error('Failed to read version config file: ' + version_file)
        LOG.error(ioerr)
        sys.exit(1)

    # Thift api version for the clients.
    from codeCheckerDBAccess import constants
    print(('Thrift client api version: \t' +
           constants.API_VERSION).expandtabs(30))
def handle_version_info(args):
    """
    Get and print the version information from the version config
    file and the Thrift API versions.
    """
    context = generic_package_context.get_context()
    version_file = context.version_file

    try:
        with open(version_file) as v_file:
            v_data = v_file.read()

        version_data = json.loads(v_data)

        base_version = version_data['version']['major'] + \
            '.' + version_data['version']['minor']
        db_schema_version = version_data['db_version']['major'] + \
            '.' + version_data['db_version']['minor']

        # Bug fix: wrap the string in parentheses and call expandtabs()
        # before print(); 'print(...).expandtabs(30)' relied on the
        # Python 2 print statement and fails with the print() function.
        print(('Base package version: \t' + base_version).expandtabs(30))
        print(('Package build date: \t' +
               version_data['package_build_date']).expandtabs(30))
        print(('Git hash: \t' + version_data['git_hash']).expandtabs(30))
        print(('DB schema version: \t' + db_schema_version).expandtabs(30))

    except ValueError as verr:
        LOG.error('Failed to decode version information from the config file.')
        LOG.error(verr)
        sys.exit(1)
    except IOError as ioerr:
        LOG.error('Failed to read version config file: ' + version_file)
        LOG.error(ioerr)
        sys.exit(1)

    # Thift api version for the clients.
    from codeCheckerDBAccess import constants
    print(('Thrift client api version: \t' +
           constants.API_VERSION).expandtabs(30))
def handle_server(args):
    """
    Starts the report viewer server.
    """
    if not host_check.check_zlib():
        sys.exit(1)

    workspace = args.workspace

    # WARNING
    # In case of SQLite args.dbaddress default value is used
    # for which the is_localhost should return true.
    if util.is_localhost(args.dbaddress) and not os.path.exists(workspace):
        os.makedirs(workspace)

    suppress_handler = generic_package_suppress_handler.\
        GenericSuppressHandler()
    if args.suppress is None:
        LOG.warning('No suppress file was given, suppressed results will '
                    'be only stored in the database.')
    else:
        if not os.path.exists(args.suppress):
            LOG.error('Suppress file ' + args.suppress + ' not found!')
            sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = workspace
    session_manager.SessionManager.CodeChecker_Workspace = workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    conn_mgr = client.ConnectionManager(sql_server,
                                        args.check_address,
                                        args.check_port)

    # Either start both the report server and the database, or the
    # database only, depending on whether a check port was given.
    if args.check_port:
        LOG.debug('Starting CodeChecker server and database server.')
        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)
        conn_mgr.start_report_server()
    else:
        LOG.debug('Starting database.')
        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)

    # Start database viewer.
    db_connection_string = sql_server.get_connection_string()

    suppress_handler.suppress_file = args.suppress
    LOG.debug('Using suppress file: ' + str(suppress_handler.suppress_file))

    # Checker documentation (markdown files + mapping) served by the
    # viewer web application.
    checker_md_docs = os.path.join(context.doc_root, 'checker_md_docs')
    checker_md_docs_map = os.path.join(checker_md_docs,
                                       'checker_doc_map.json')
    with open(checker_md_docs_map, 'r') as dFile:
        checker_md_docs_map = json.load(dFile)

    package_data = {'www_root': context.www_root,
                    'doc_root': context.doc_root,
                    'checker_md_docs': checker_md_docs,
                    'checker_md_docs_map': checker_md_docs_map}

    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_server(args):
    """
    Starts the report viewer server (and the database it uses).
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    check_options_validity(args)

    if args.suppress is None:
        LOG.warning('WARNING! No suppress file was given, suppressed results will be only stored in the database.')
    else:
        if not os.path.exists(args.suppress):
            LOG.error('Suppress file '+args.suppress+' not found!')
            sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    conn_mgr = client.ConnectionManager(sql_server,
                                        args.check_address,
                                        args.check_port)

    # With a check port both the report server and the database are
    # started; otherwise only the database is brought up.
    if args.check_port:
        LOG.debug('Starting codechecker server and database server.')
        sql_server.start(wait_for_start=True, init=True)
        conn_mgr.start_report_server(context.db_version_info)
    else:
        LOG.debug('Starting database.')
        sql_server.start(wait_for_start=True, init=True)

    # start database viewer
    db_connection_string = sql_server.get_connection_string()

    suppress_handler = generic_package_suppress_handler.GenericSuppressHandler()
    suppress_handler.suppress_file = args.suppress
    LOG.debug('Using suppress file: ' + str(suppress_handler.suppress_file))

    # Static content and checker documentation served by the viewer.
    package_data = {}
    package_data['www_root'] = context.www_root
    package_data['doc_root'] = context.doc_root

    checker_md_docs = os.path.join(context.doc_root, 'checker_md_docs')
    checker_md_docs_map = os.path.join(checker_md_docs,
                                       'checker_doc_map.json')
    package_data['checker_md_docs'] = checker_md_docs

    with open(checker_md_docs_map, 'r') as dFile:
        checker_md_docs_map = json.load(dFile)

    package_data['checker_md_docs_map'] = checker_md_docs_map

    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_check(args):
    """
    Check mode.

    Runs the original build to collect the build actions, then runs the
    analysis, storing results and suppress information in the database.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    args.workspace = os.path.realpath(args.workspace)
    if not os.path.isdir(args.workspace):
        os.mkdir(args.workspace)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # Load severity map from config file.
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    log_file = _check_generate_log_file(args, context)
    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    conn_mgr = client.ConnectionManager(sql_server,
                                        'localhost',
                                        util.get_free_port())

    if args.jobs <= 0:
        args.jobs = 1

    package_version = context.version['major'] + '.' \
        + context.version['minor']

    # Use an explicit suppress file if one was given; otherwise a
    # workspace-local file is looked up.
    suppress_file = os.path.join(args.workspace, package_version) \
        if not args.suppress \
        else os.path.realpath(args.suppress)
    send_suppress = False
    if os.path.exists(suppress_file):
        send_suppress = True

    sql_server.start(wait_for_start=True, init=True)
    conn_mgr.start_report_server(context.db_version_info)
    LOG.debug("Checker server started.")

    with client.get_connection() as connection:
        try:
            context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                        args.name,
                                                        package_version,
                                                        args.update)
        except shared.ttypes.RequestFailed as thrift_ex:
            if 'violates unique constraint "uq_runs_name"' \
                    not in thrift_ex.message:
                # not the unique name was the problem
                raise
            else:
                LOG.info("Name was already used in the database please choose another unique name for checking.")
                sys.exit(1)

        if args.update:
            # clean previous suppress information
            client.clean_suppress(connection, context.run_id)

        if send_suppress:
            client.send_suppress(connection, suppress_file)

        report_output = os.path.join(context.codechecker_workspace,
                                     context.report_output_dir_name)
        if not os.path.exists(report_output):
            os.mkdir(report_output)

        static_analyzer = analyzer.StaticAnalyzer(context)
        static_analyzer.workspace = report_output

        # first add checkers from config file
        static_analyzer.checkers = context.default_checkers
        # add user defined checkers
        try:
            static_analyzer.checkers = args.ordered_checker_args
        except AttributeError:
            LOG.debug('No checkers were defined in the command line')

        if args.configfile:
            static_analyzer.add_config(connection, args.configfile)
        # else:
        # add default config from package
        # static_analyzer.add_config(connection,
        # context.checkers_config_file)

        if args.skipfile:
            static_analyzer.add_skip(connection,
                                     os.path.realpath(args.skipfile))

    LOG.info("Static analysis is starting..")
    start_time = time.time()

    LOG.debug("Starting workers...")
    start_workers(static_analyzer, actions, args.jobs, context)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run()

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")

    if not args.keep_tmp:
        LOG.debug("Removing plist files in " +
                  context.codechecker_workspace)
        plist_files = glob.glob(os.path.join(report_output, '*.plist'))
        for pf in plist_files:
            os.remove(pf)

    LOG.info("Analysis has finished.")
def handle_server(args):
    """
    Starts the report viewer server.

    Depending on args.check_port either the CodeChecker report server and
    the database server are started, or only the database server; then the
    web based database viewer is started.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    try:
        workspace = args.workspace
    except AttributeError:
        # If no workspace value was set for some reason
        # in args set the default value.
        workspace = util.get_default_workspace()

    # WARNING
    # In case of SQLite args.dbaddress default value is used
    # for which the is_localhost should return true.
    local_db = util.is_localhost(args.dbaddress)
    if local_db and not os.path.exists(workspace):
        os.makedirs(workspace)

    if args.suppress is None:
        LOG.warning(
            "WARNING! No suppress file was given, suppressed results will " +
            'be only stored in the database.')
    else:
        if not os.path.exists(args.suppress):
            LOG.error('Suppress file ' + args.suppress + ' not found!')
            sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = workspace
    # Make the workspace visible to the session/authentication layer as well.
    session_manager.SessionManager.CodeChecker_Workspace = workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    conn_mgr = client.ConnectionManager(sql_server,
                                        args.check_address,
                                        args.check_port)
    if args.check_port:
        LOG.debug('Starting CodeChecker server and database server.')
        sql_server.start(context.db_version_info, wait_for_start=True,
                         init=True)
        # NOTE(review): the sibling variant of this function passes
        # context.db_version_info here — confirm which signature
        # start_report_server expects.
        conn_mgr.start_report_server()
    else:
        LOG.debug('Starting database.')
        sql_server.start(context.db_version_info, wait_for_start=True,
                         init=True)

    # Start database viewer.
    db_connection_string = sql_server.get_connection_string()

    suppress_handler = generic_package_suppress_handler.GenericSuppressHandler()
    try:
        suppress_handler.suppress_file = args.suppress
        LOG.debug('Using suppress file: ' +
                  str(suppress_handler.suppress_file))
    except AttributeError as aerr:
        # Suppress file was not set.
        LOG.debug(aerr)

    # Static content (web root, docs) served by the viewer server.
    package_data = {'www_root': context.www_root,
                    'doc_root': context.doc_root}

    checker_md_docs = os.path.join(context.doc_root, 'checker_md_docs')
    checker_md_docs_map = os.path.join(checker_md_docs,
                                       'checker_doc_map.json')
    package_data['checker_md_docs'] = checker_md_docs
    with open(checker_md_docs_map, 'r') as dFile:
        # Re-bind the name: path string -> parsed checker documentation map.
        checker_md_docs_map = json.load(dFile)

    package_data['checker_md_docs_map'] = checker_md_docs_map

    # NOTE(review): start_server presumably blocks serving requests until
    # the process is stopped — confirm in client_db_access_server.
    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_check(args):
    """
    Runs the original build and logs the buildactions.
    Based on the log runs the analysis.

    Sets up the workspace, obtains (or generates) the compilation command
    log, starts the database and the report server, then runs the analysis
    on every build action parsed from the log. All failures are reported
    through LOG; fatal ones terminate the process with exit code 1.
    """
    try:
        if not host_check.check_zlib():
            LOG.error("zlib error")
            sys.exit(1)

        try:
            workspace = args.workspace
        except AttributeError:
            # If no workspace value was set for some reason
            # in args set the default value.
            workspace = util.get_default_workspace()

        workspace = os.path.realpath(workspace)
        if not os.path.isdir(workspace):
            os.mkdir(workspace)

        context = generic_package_context.get_context()
        context.codechecker_workspace = workspace
        context.db_username = args.dbusername

        # Use an existing log file if one was supplied, otherwise run the
        # build to produce one.
        log_file = build_manager.check_log_file(args)

        if not log_file:
            log_file = build_manager.generate_log_file(args,
                                                       context,
                                                       args.quiet_build)
        if not log_file:
            # FIX: log_file is falsy (possibly None) on this path;
            # concatenating it directly raised TypeError and masked the
            # real error. str() keeps the message printable in all cases.
            LOG.error("Failed to generate compilation command file: " +
                      str(log_file))
            sys.exit(1)

        try:
            actions = log_parser.parse_log(log_file)
        except Exception as ex:
            LOG.error(ex)
            sys.exit(1)

        if not actions:
            LOG.warning('There are no build actions in the log file.')
            sys.exit(1)

        check_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        sql_server = SQLServer.from_cmdline_args(args,
                                                 context.codechecker_workspace,
                                                 context.migration_root,
                                                 check_env)

        conn_mgr = client.ConnectionManager(sql_server,
                                            'localhost',
                                            util.get_free_port())

        sql_server.start(context.db_version_info, wait_for_start=True,
                         init=True)

        conn_mgr.start_report_server()

        LOG.debug("Checker server started.")

        analyzer.run_check(args, actions, context)

        LOG.info("Analysis has finished.")

        # Reconstruct the database-related flags so the user can start the
        # viewer server against the same results.
        db_data = ""
        if args.postgresql:
            db_data += " --postgresql" \
                       + " --dbname " + args.dbname \
                       + " --dbport " + str(args.dbport) \
                       + " --dbusername " + args.dbusername

        LOG.info("To view results run:\nCodeChecker server -w " +
                 workspace + db_data)

    except Exception as ex:
        # Top-level boundary: log and dump the traceback instead of
        # crashing with an unformatted exception.
        LOG.error(ex)
        import traceback
        print(traceback.format_exc())
def handle_list_checkers(args):
    """Log the list of checkers known to the static analyzer."""
    pkg_context = generic_package_context.get_context()
    checker_list = analyzer.StaticAnalyzer(pkg_context).get_checker_list()
    LOG.info(checker_list)
def handle_server(args):
    """
    Starts the report viewer server.

    Depending on args.check_port either the CodeChecker report server and
    the database server are started, or only the database server; then the
    web based database viewer is started.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    try:
        workspace = args.workspace
    except AttributeError:
        # If no workspace value was set for some reason
        # in args set the default value.
        workspace = util.get_default_workspace()

    # WARNING
    # In case of SQLite args.dbaddress default value is used
    # for which the is_localhost should return true.
    local_db = util.is_localhost(args.dbaddress)
    if local_db and not os.path.exists(workspace):
        os.makedirs(workspace)

    if args.suppress is None:
        LOG.warning(
            'WARNING! No suppress file was given, suppressed results will be only stored in the database.'
        )
    else:
        if not os.path.exists(args.suppress):
            LOG.error('Suppress file ' + args.suppress + ' not found!')
            sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    conn_mgr = client.ConnectionManager(sql_server,
                                        args.check_address,
                                        args.check_port)
    if args.check_port:
        LOG.debug('Starting codechecker server and database server.')
        sql_server.start(context.db_version_info, wait_for_start=True,
                         init=True)
        conn_mgr.start_report_server(context.db_version_info)
    else:
        LOG.debug('Starting database.')
        sql_server.start(context.db_version_info, wait_for_start=True,
                         init=True)

    # Start database viewer.
    db_connection_string = sql_server.get_connection_string()

    suppress_handler = generic_package_suppress_handler.GenericSuppressHandler()
    try:
        suppress_handler.suppress_file = args.suppress
        LOG.debug('Using suppress file: ' +
                  str(suppress_handler.suppress_file))
    except AttributeError as aerr:
        # Suppress file was not set.
        LOG.debug(aerr)

    # Static content (web root, docs) served by the viewer server.
    package_data = {}
    package_data['www_root'] = context.www_root
    package_data['doc_root'] = context.doc_root

    checker_md_docs = os.path.join(context.doc_root, 'checker_md_docs')
    checker_md_docs_map = os.path.join(checker_md_docs,
                                       'checker_doc_map.json')
    package_data['checker_md_docs'] = checker_md_docs
    with open(checker_md_docs_map, 'r') as dFile:
        # Re-bind the name: path string -> parsed checker documentation map.
        checker_md_docs_map = json.load(dFile)

    package_data['checker_md_docs_map'] = checker_md_docs_map

    # NOTE(review): start_server presumably blocks serving requests until
    # the process is stopped — confirm in client_db_access_server.
    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_check(args):
    """
    Runs the original build and logs the buildactions.
    Based on the log runs the analysis.

    Sets up the workspace, obtains (or generates) the compilation command
    log, starts the database and the report server, then runs the analysis
    on every build action parsed from the log. All failures are reported
    through LOG; fatal ones terminate the process with exit code 1.
    """
    try:
        if not host_check.check_zlib():
            LOG.error("zlib error")
            sys.exit(1)

        try:
            workspace = args.workspace
        except AttributeError:
            # If no workspace value was set for some reason
            # in args set the default value.
            workspace = util.get_default_workspace()

        workspace = os.path.realpath(workspace)
        if not os.path.isdir(workspace):
            os.mkdir(workspace)

        context = generic_package_context.get_context()
        context.codechecker_workspace = workspace
        context.db_username = args.dbusername

        # Use an existing log file if one was supplied, otherwise run the
        # build to produce one.
        log_file = build_manager.check_log_file(args)
        if not log_file:
            log_file = build_manager.generate_log_file(args, context)
        if not log_file:
            # FIX: was sys.ecit(1) — an AttributeError at runtime instead
            # of a clean exit. Also wrap log_file in str() since it is
            # falsy (possibly None) on this path and direct concatenation
            # raised TypeError, masking the real error.
            LOG.error("Failed to generate compilation command file: " +
                      str(log_file))
            sys.exit(1)

        try:
            actions = log_parser.parse_log(log_file)
        except Exception as ex:
            LOG.error(ex)
            sys.exit(1)

        if not actions:
            LOG.warning('There are no build actions in the log file.')
            sys.exit(1)

        check_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        sql_server = SQLServer.from_cmdline_args(args,
                                                 context.codechecker_workspace,
                                                 context.migration_root,
                                                 check_env)

        conn_mgr = client.ConnectionManager(sql_server,
                                            'localhost',
                                            util.get_free_port())

        sql_server.start(context.db_version_info, wait_for_start=True,
                         init=True)

        conn_mgr.start_report_server(context.db_version_info)

        LOG.debug("Checker server started.")

        analyzer.run_check(args, actions, context)

        LOG.info("Analysis has finished.")

        # Reconstruct the database-related flags so the user can start the
        # viewer server against the same results.
        db_data = ""
        if args.postgresql:
            db_data += " --postgresql" \
                       + " --dbname " + args.dbname \
                       + " --dbport " + str(args.dbport) \
                       + " --dbusername " + args.dbusername

        LOG.info("To view results run:\nCodeChecker server -w " +
                 workspace + db_data)

    except Exception as ex:
        # Top-level boundary: log and dump the traceback instead of
        # crashing with an unformatted exception.
        LOG.error(ex)
        import traceback
        print(traceback.format_exc())
def handle_server(args):
    """
    Starts the report viewer server (PostgreSQL based variant).

    Depending on args.check_port either the CodeChecker report server and
    PostgreSQL are started, or PostgreSQL alone; then the web based
    database viewer is started.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    check_options_validity(args)

    if args.suppress is None:
        LOG.warning('WARNING! No suppress file was given, suppressed results will be only stored in the database.')
    else:
        if not os.path.exists(args.suppress):
            LOG.error('Suppress file ' + args.suppress + ' not found!')
            sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    setup_connection_manager_db(args)

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    # Connection settings are stored on the ConnectionManager class itself
    # (class attributes, not instances).
    client.ConnectionManager.run_env = check_env

    if args.check_port:
        LOG.debug('Starting codechecker server and postgres.')
        client.ConnectionManager.host = args.check_address
        client.ConnectionManager.port = args.check_port
        # NOTE(review): run_env is assigned twice (also above) — the second
        # assignment is redundant but harmless.
        client.ConnectionManager.run_env = check_env

        # Starts postgres as well.
        client.ConnectionManager.start_server(args.dbname, context)
    else:
        LOG.debug('Starting postgres.')
        client.ConnectionManager.start_postgres(context, init_db=False)

    # Wait until the database start process releases its lock.
    client.ConnectionManager.block_until_db_start_proc_free(context)

    # Start database viewer.
    db_connection_string = 'postgresql://' + args.dbusername + \
                           '@' + args.dbaddress + \
                           ':' + str(args.dbport) + \
                           '/' + args.dbname

    suppress_handler = generic_package_suppress_handler.GenericSuppressHandler()
    suppress_handler.suppress_file = args.suppress
    LOG.debug('Using suppress file: ' + str(suppress_handler.suppress_file))

    # Static content (web root, docs) served by the viewer server.
    package_data = {}
    package_data['www_root'] = context.www_root
    package_data['doc_root'] = context.doc_root

    checker_md_docs = os.path.join(context.doc_root, 'checker_md_docs')
    checker_md_docs_map = os.path.join(checker_md_docs,
                                       'checker_doc_map.json')
    package_data['checker_md_docs'] = checker_md_docs
    with open(checker_md_docs_map, 'r') as dFile:
        # Re-bind the name: path string -> parsed checker documentation map.
        checker_md_docs_map = json.load(dFile)

    package_data['checker_md_docs_map'] = checker_md_docs_map

    # NOTE(review): start_server presumably blocks serving requests until
    # the process is stopped — confirm in client_db_access_server.
    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_check(args):
    """
    Check mode.

    Optionally runs the given build command with the compilation logger
    attached, parses the resulting build log, then runs the static
    analysis on the collected build actions and stores the results
    through the checker server.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    args.workspace = os.path.realpath(args.workspace)
    if not os.path.isdir(args.workspace):
        os.mkdir(args.workspace)

    log_file = ""
    if args.logfile:
        log_file = os.path.realpath(args.logfile)
        if not os.path.exists(args.logfile):
            LOG.info("Log file does not exists.")
            return

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # Load severity map from config file.
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    if args.command:
        # Check if logger bin exists.
        if not os.path.isfile(context.path_logger_bin):
            LOG.debug('Logger binary not found! Required for logging.')
            sys.exit(1)

        # Check if logger lib exists.
        if not os.path.exists(context.path_logger_lib):
            LOG.debug('Logger library directory not found! Libs are requires for logging.')
            sys.exit(1)

        log_file = os.path.join(context.codechecker_workspace,
                                context.build_log_file_name)
        if os.path.exists(log_file):
            os.remove(log_file)
        open(log_file, 'a').close()  # Same as linux's touch.
        perform_build_command(log_file, args.command, context)

    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    setup_connection_manager_db(args)
    client.ConnectionManager.port = util.get_free_port()

    if args.jobs <= 0:
        args.jobs = 1

    package_version = context.version['major'] + '.' + \
        context.version['minor']
    # Default suppress file path is derived from the package version when
    # no explicit suppress file was given.
    suppress_file = os.path.join(args.workspace, package_version) \
        if not args.suppress \
        else os.path.realpath(args.suppress)

    send_suppress = False
    if os.path.exists(suppress_file):
        send_suppress = True

    client.ConnectionManager.run_env = check_env

    client.ConnectionManager.start_server(args.dbname, context)

    LOG.debug("Checker server started.")

    with client.get_connection() as connection:
        try:
            context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                        args.name,
                                                        package_version,
                                                        args.update)
        except shared.ttypes.RequestFailed as thrift_ex:
            if 'violates unique constraint "runs_name_key"' not in thrift_ex.message:
                # Not the unique name was the problem.
                raise
            else:
                LOG.info("Name was already used in the database please choose another unique name for checking.")
                sys.exit(1)

        if send_suppress:
            client.send_suppress(connection, suppress_file)

        # static_analyzer.clean = args.clean
        if args.clean:
            # Cleaning up previous results.
            LOG.debug("Cleaning previous plist files in " +
                      context.codechecker_workspace)
            plist_files = glob.glob(os.path.join(context.codechecker_workspace, '*.plist'))
            for pf in plist_files:
                os.remove(pf)

        report_output = os.path.join(context.codechecker_workspace,
                                     context.report_output_dir_name)
        if not os.path.exists(report_output):
            os.mkdir(report_output)

        static_analyzer = analyzer.StaticAnalyzer(context)
        static_analyzer.workspace = report_output

        # First add checkers from config file.
        static_analyzer.checkers = context.default_checkers

        # Add user defined checkers.
        try:
            static_analyzer.checkers = args.ordered_checker_args
        except AttributeError as aerr:
            LOG.debug('No checkers were defined in the command line')

        if args.configfile:
            static_analyzer.add_config(connection, args.configfile)
        # else:
        # add default config from package
        # static_analyzer.add_config(connection,
        # context.checkers_config_file)

        if args.skipfile:
            static_analyzer.add_skip(connection,
                                     os.path.realpath(args.skipfile))

    # NOTE(review): the analysis below is reconstructed as running outside
    # the first connection scope (a fresh connection is opened for
    # finish_checker_run) — confirm against the original indentation.
    LOG.info("Static analysis is starting..")
    start_time = time.time()

    LOG.debug("Starting workers...")
    start_workers(static_analyzer, actions, args.jobs, context)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run()

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")

    LOG.info("Analysis has finished.")