def handle_check(args):
    """
    Run the original build, log the build actions, and run the analysis
    based on the generated compilation log.

    Starts the database and a local report-storage server on a free port,
    runs the analyzers, then prints a hint on how to start the viewer
    server. The temporary compilation log is removed unless
    args.keep_tmp is set.
    """
    # Initialized before the try block so the finally clause below cannot
    # raise UnboundLocalError (which would mask the original exception)
    # when a failure happens before the log file is created.
    log_file = None
    try:
        if not host_check.check_zlib():
            sys.exit(1)

        args.workspace = os.path.abspath(args.workspace)
        if not os.path.isdir(args.workspace):
            os.mkdir(args.workspace)

        context = generic_package_context.get_context()
        context.codechecker_workspace = args.workspace
        context.db_username = args.dbusername

        log_file = build_manager.check_log_file(args, context)
        if not log_file:
            # NOTE: previously this concatenated the falsy log_file value
            # into the message, raising TypeError for None instead of
            # reporting the failure and exiting.
            LOG.error("Failed to generate compilation command file.")
            sys.exit(1)

        actions = log_parser.parse_log(log_file,
                                       args.add_compiler_defaults)

        check_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        sql_server = SQLServer.from_cmdline_args(args,
                                                 context.codechecker_workspace,
                                                 context.migration_root,
                                                 check_env)

        conn_mgr = client.ConnectionManager(sql_server,
                                            'localhost',
                                            util.get_free_port())

        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)

        conn_mgr.start_report_server()
        LOG.debug("Checker server started.")

        analyzer.run_check(args, actions, context)

        LOG.info("Analysis has finished.")
        log_startserver_hint(args)

    except Exception as ex:
        LOG.error(ex)
        import traceback
        print(traceback.format_exc())
    finally:
        # Best-effort cleanup of the temporary compilation log; runs on
        # success, failure and sys.exit alike.
        if not args.keep_tmp:
            if log_file:
                LOG.debug('Removing temporary log file: ' + log_file)
                os.remove(log_file)
def handle_server(args):
    """
    Starts the report viewer server.
    """
    if not host_check.check_zlib():
        sys.exit(1)

    workspace = args.workspace

    # WARNING
    # In case of SQLite args.dbaddress default value is used
    # for which the is_localhost should return true.
    if util.is_localhost(args.dbaddress) and not os.path.exists(workspace):
        os.makedirs(workspace)

    suppress_handler = \
        generic_package_suppress_handler.GenericSuppressHandler()

    if args.suppress is None:
        LOG.warning('No suppress file was given, suppressed results will '
                    'be only stored in the database.')
    elif not os.path.exists(args.suppress):
        LOG.error('Suppress file ' + args.suppress + ' not found!')
        sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = workspace
    session_manager.SessionManager.CodeChecker_Workspace = workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)

    conn_mgr = client.ConnectionManager(sql_server,
                                        args.check_address,
                                        args.check_port)

    # The database is started in both modes; the report-storage server is
    # only needed when a check port was given.
    if args.check_port:
        LOG.debug('Starting CodeChecker server and database server.')
    else:
        LOG.debug('Starting database.')
    sql_server.start(context.db_version_info, wait_for_start=True, init=True)
    if args.check_port:
        conn_mgr.start_report_server()

    # Start database viewer.
    db_connection_string = sql_server.get_connection_string()

    suppress_handler.suppress_file = args.suppress
    LOG.debug('Using suppress file: ' + str(suppress_handler.suppress_file))

    docs_dir = os.path.join(context.doc_root, 'checker_md_docs')
    docs_map_path = os.path.join(docs_dir, 'checker_doc_map.json')
    with open(docs_map_path, 'r') as map_file:
        docs_map = json.load(map_file)

    package_data = {
        'www_root': context.www_root,
        'doc_root': context.doc_root,
        'checker_md_docs': docs_dir,
        'checker_md_docs_map': docs_map
    }

    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_server(args):
    """
    Starts the report viewer server.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    # Fall back to the default workspace when args does not carry one.
    if hasattr(args, 'workspace'):
        workspace = args.workspace
    else:
        workspace = util.get_default_workspace()

    # WARNING
    # In case of SQLite args.dbaddress default value is used
    # for which the is_localhost should return true.
    local_db = util.is_localhost(args.dbaddress)
    if local_db and not os.path.exists(workspace):
        os.makedirs(workspace)

    if args.suppress is None:
        LOG.warning("WARNING! No suppress file was given, suppressed "
                    "results will be only stored in the database.")
    elif not os.path.exists(args.suppress):
        LOG.error('Suppress file ' + args.suppress + ' not found!')
        sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = workspace
    session_manager.SessionManager.CodeChecker_Workspace = workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)

    conn_mgr = client.ConnectionManager(sql_server,
                                        args.check_address,
                                        args.check_port)

    # The database starts in both modes; the report-storage server only
    # when a check port was supplied.
    if args.check_port:
        LOG.debug('Starting CodeChecker server and database server.')
    else:
        LOG.debug('Starting database.')
    sql_server.start(context.db_version_info, wait_for_start=True, init=True)
    if args.check_port:
        conn_mgr.start_report_server()

    # Start database viewer.
    db_connection_string = sql_server.get_connection_string()

    suppress_handler = \
        generic_package_suppress_handler.GenericSuppressHandler()
    try:
        suppress_handler.suppress_file = args.suppress
        LOG.debug('Using suppress file: ' +
                  str(suppress_handler.suppress_file))
    except AttributeError as aerr:
        # Suppress file was not set.
        LOG.debug(aerr)

    docs_dir = os.path.join(context.doc_root, 'checker_md_docs')
    docs_map_path = os.path.join(docs_dir, 'checker_doc_map.json')
    with open(docs_map_path, 'r') as map_file:
        docs_map = json.load(map_file)

    package_data = {
        'www_root': context.www_root,
        'doc_root': context.doc_root,
        'checker_md_docs': docs_dir,
        'checker_md_docs_map': docs_map
    }

    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_check(args):
    """
    Runs the original build and logs the buildactions.
    Based on the log runs the analysis.

    All failures are caught at the bottom of the function: the error is
    logged together with its traceback and the function returns normally
    (sys.exit calls inside the try block still terminate, since
    SystemExit is not an Exception subclass).
    """
    try:
        if not host_check.check_zlib():
            LOG.error("zlib error")
            sys.exit(1)

        try:
            workspace = args.workspace
        except AttributeError:
            # If no workspace value was set for some reason
            # in args set the default value.
            workspace = util.get_default_workspace()

        workspace = os.path.realpath(workspace)
        if not os.path.isdir(workspace):
            os.mkdir(workspace)

        context = generic_package_context.get_context()
        context.codechecker_workspace = workspace
        context.db_username = args.dbusername

        # Reuse an existing compilation log if there is one, otherwise
        # run the build to produce a fresh one.
        log_file = build_manager.check_log_file(args)
        if not log_file:
            log_file = build_manager.generate_log_file(args,
                                                       context,
                                                       args.quiet_build)
        if not log_file:
            # FIX: the message previously appended the falsy log_file
            # value; with None this raised a TypeError that the outer
            # except swallowed, so sys.exit(1) was never reached.
            LOG.error("Failed to generate compilation command file.")
            sys.exit(1)

        try:
            actions = log_parser.parse_log(log_file)
        except Exception as ex:
            LOG.error(ex)
            sys.exit(1)

        if not actions:
            LOG.warning('There are no build actions in the log file.')
            sys.exit(1)

        check_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        sql_server = SQLServer.from_cmdline_args(args,
                                                 context.codechecker_workspace,
                                                 context.migration_root,
                                                 check_env)

        conn_mgr = client.ConnectionManager(sql_server,
                                            'localhost',
                                            util.get_free_port())

        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)

        conn_mgr.start_report_server()
        LOG.debug("Checker server started.")

        analyzer.run_check(args, actions, context)

        LOG.info("Analysis has finished.")

        # Tell the user how to start the viewer server for these results.
        db_data = ""
        if args.postgresql:
            db_data += " --postgresql" \
                       + " --dbname " + args.dbname \
                       + " --dbport " + str(args.dbport) \
                       + " --dbusername " + args.dbusername
        LOG.info("To view results run:\nCodeChecker server -w " +
                 workspace + db_data)

    except Exception as ex:
        LOG.error(ex)
        import traceback
        print(traceback.format_exc())
def handle_server(args):
    """
    Starts the report viewer server.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    # Fall back to the default workspace when args does not carry one.
    if hasattr(args, 'workspace'):
        workspace = args.workspace
    else:
        workspace = util.get_default_workspace()

    # WARNING
    # in case of SQLite args.dbaddress default value is used
    # for which the is_localhost should return true
    local_db = util.is_localhost(args.dbaddress)
    if local_db and not os.path.exists(workspace):
        os.makedirs(workspace)

    if args.suppress is None:
        LOG.warning(
            'WARNING! No suppress file was given, suppressed results will be only stored in the database.'
        )
    elif not os.path.exists(args.suppress):
        LOG.error('Suppress file ' + args.suppress + ' not found!')
        sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)

    conn_mgr = client.ConnectionManager(sql_server,
                                        args.check_address,
                                        args.check_port)

    # The database is started either way; the report-storage server is
    # only needed when a check port was supplied.
    if args.check_port:
        LOG.debug('Starting codechecker server and database server.')
    else:
        LOG.debug('Starting database.')
    sql_server.start(context.db_version_info, wait_for_start=True, init=True)
    if args.check_port:
        conn_mgr.start_report_server(context.db_version_info)

    # start database viewer
    db_connection_string = sql_server.get_connection_string()

    suppress_handler = \
        generic_package_suppress_handler.GenericSuppressHandler()
    try:
        suppress_handler.suppress_file = args.suppress
        LOG.debug('Using suppress file: ' +
                  str(suppress_handler.suppress_file))
    except AttributeError as aerr:
        # suppress file was not set
        LOG.debug(aerr)

    docs_dir = os.path.join(context.doc_root, 'checker_md_docs')
    docs_map_path = os.path.join(docs_dir, 'checker_doc_map.json')
    with open(docs_map_path, 'r') as map_file:
        docs_map = json.load(map_file)

    package_data = {
        'www_root': context.www_root,
        'doc_root': context.doc_root,
        'checker_md_docs': docs_dir,
        'checker_md_docs_map': docs_map
    }

    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_check(args):
    """
    Runs the original build and logs the buildactions.
    Based on the log runs the analysis.

    Errors are logged with a traceback at the bottom of the function;
    sys.exit calls inside the try block still terminate because
    SystemExit is not caught by 'except Exception'.
    """
    try:
        if not host_check.check_zlib():
            LOG.error("zlib error")
            sys.exit(1)

        try:
            workspace = args.workspace
        except AttributeError:
            # if no workspace value was set for some reason
            # in args set the default value
            workspace = util.get_default_workspace()

        workspace = os.path.realpath(workspace)
        if not os.path.isdir(workspace):
            os.mkdir(workspace)

        context = generic_package_context.get_context()
        context.codechecker_workspace = workspace
        context.db_username = args.dbusername

        # Reuse an existing compilation log, otherwise run the build
        # to generate a fresh one.
        log_file = build_manager.check_log_file(args)
        if not log_file:
            log_file = build_manager.generate_log_file(args, context)
        if not log_file:
            # FIX: message no longer concatenates the falsy log_file
            # value (TypeError with None).
            LOG.error("Failed to generate compilation command file.")
            # FIX: was sys.ecit(1) — the AttributeError was swallowed by
            # the outer except instead of terminating the process.
            sys.exit(1)

        try:
            actions = log_parser.parse_log(log_file)
        except Exception as ex:
            LOG.error(ex)
            sys.exit(1)

        if not actions:
            LOG.warning('There are no build actions in the log file.')
            sys.exit(1)

        check_env = analyzer_env.get_check_env(context.path_env_extra,
                                               context.ld_lib_path_extra)

        sql_server = SQLServer.from_cmdline_args(args,
                                                 context.codechecker_workspace,
                                                 context.migration_root,
                                                 check_env)

        conn_mgr = client.ConnectionManager(sql_server,
                                            'localhost',
                                            util.get_free_port())

        sql_server.start(context.db_version_info,
                         wait_for_start=True,
                         init=True)

        conn_mgr.start_report_server(context.db_version_info)
        LOG.debug("Checker server started.")

        analyzer.run_check(args, actions, context)

        LOG.info("Analysis has finished.")

        # Tell the user how to start the viewer server for these results.
        db_data = ""
        if args.postgresql:
            db_data += " --postgresql" \
                       + " --dbname " + args.dbname \
                       + " --dbport " + str(args.dbport) \
                       + " --dbusername " + args.dbusername
        LOG.info("To view results run:\nCodeChecker server -w " +
                 workspace + db_data)

    except Exception as ex:
        LOG.error(ex)
        import traceback
        print(traceback.format_exc())
def handle_check(args):
    """
    Check mode.

    Either parses an existing compilation log (args.logfile) or builds
    the given command under the logger (args.command), then runs the
    static analyzers on the collected build actions and stores the
    results through the checker server.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    args.workspace = os.path.realpath(args.workspace)
    if not os.path.isdir(args.workspace):
        os.mkdir(args.workspace)

    log_file = ""
    if args.logfile:
        log_file = os.path.realpath(args.logfile)
        if not os.path.exists(args.logfile):
            # FIX: grammar of the user-facing message ("does not exists").
            LOG.info("Log file does not exist.")
            return

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # Load severity map from config file.
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    if args.command:
        # Check if logger bin exists.
        # FIX: these two fatal conditions were logged at DEBUG level,
        # so the process exited without any visible explanation.
        if not os.path.isfile(context.path_logger_bin):
            LOG.error('Logger binary not found! Required for logging.')
            sys.exit(1)

        # Check if logger lib exists.
        if not os.path.exists(context.path_logger_lib):
            LOG.error('Logger library directory not found! '
                      'Libs are required for logging.')
            sys.exit(1)

        log_file = os.path.join(context.codechecker_workspace,
                                context.build_log_file_name)
        if os.path.exists(log_file):
            os.remove(log_file)
        open(log_file, 'a').close()  # Same as linux's touch.
        perform_build_command(log_file, args.command, context)

    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    setup_connection_manager_db(args)
    client.ConnectionManager.port = util.get_free_port()

    if args.jobs <= 0:
        args.jobs = 1

    # FIX: the major/minor parts were split across physical lines without
    # a working continuation; joined into a single expression.
    package_version = context.version['major'] + '.' + \
        context.version['minor']

    # Default suppress file lives in the workspace, named after the
    # package version, unless an explicit one was given.
    suppress_file = os.path.join(args.workspace, package_version) \
        if not args.suppress \
        else os.path.realpath(args.suppress)

    send_suppress = False
    if os.path.exists(suppress_file):
        send_suppress = True

    client.ConnectionManager.run_env = check_env
    client.ConnectionManager.start_server(args.dbname, context)

    LOG.debug("Checker server started.")

    with client.get_connection() as connection:
        try:
            context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                        args.name,
                                                        package_version,
                                                        args.update)
        except shared.ttypes.RequestFailed as thrift_ex:
            if 'violates unique constraint "runs_name_key"' \
                    not in thrift_ex.message:
                # Not the unique name was the problem.
                raise
            else:
                LOG.info("Name was already used in the database please "
                         "choose another unique name for checking.")
                sys.exit(1)

        if send_suppress:
            client.send_suppress(connection, suppress_file)

        # static_analyzer.clean = args.clean
        if args.clean:
            # Cleaning up previous results.
            LOG.debug("Cleaning previous plist files in " +
                      context.codechecker_workspace)
            plist_files = glob.glob(
                os.path.join(context.codechecker_workspace, '*.plist'))
            for pf in plist_files:
                os.remove(pf)

        report_output = os.path.join(context.codechecker_workspace,
                                     context.report_output_dir_name)
        if not os.path.exists(report_output):
            os.mkdir(report_output)

        static_analyzer = analyzer.StaticAnalyzer(context)
        static_analyzer.workspace = report_output

        # First add checkers from config file.
        static_analyzer.checkers = context.default_checkers

        # Add user defined checkers.
        try:
            static_analyzer.checkers = args.ordered_checker_args
        except AttributeError:
            LOG.debug('No checkers were defined in the command line')

        if args.configfile:
            static_analyzer.add_config(connection, args.configfile)
        # else:
        # add default config from package
        # static_analyzer.add_config(connection,
        #                            context.checkers_config_file)

        if args.skipfile:
            static_analyzer.add_skip(connection,
                                     os.path.realpath(args.skipfile))

    LOG.info("Static analysis is starting..")
    start_time = time.time()

    LOG.debug("Starting workers...")
    start_workers(static_analyzer, actions, args.jobs, context)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run()

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")
    LOG.info("Analysis has finished.")
def handle_server(args):
    """Start the report viewer server (PostgreSQL based revision)."""
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    check_options_validity(args)

    if args.suppress is None:
        LOG.warning('WARNING! No suppress file was given, suppressed results will be only stored in the database.')
    elif not os.path.exists(args.suppress):
        LOG.error('Suppress file ' + args.suppress + ' not found!')
        sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    setup_connection_manager_db(args)

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)
    client.ConnectionManager.run_env = check_env

    if args.check_port:
        # Run the full stack: checker server plus postgres.
        LOG.debug('Starting codechecker server and postgres.')
        client.ConnectionManager.host = args.check_address
        client.ConnectionManager.port = args.check_port
        client.ConnectionManager.run_env = check_env
        client.ConnectionManager.start_server(args.dbname, context)
    else:
        # Database only; wait until the startup process releases it.
        LOG.debug('Starting postgres.')
        client.ConnectionManager.start_postgres(context, init_db=False)
        client.ConnectionManager.block_until_db_start_proc_free(context)

    # start database viewer
    db_connection_string = 'postgresql://' + args.dbusername + \
                           '@' + args.dbaddress + \
                           ':' + str(args.dbport) + \
                           '/' + args.dbname

    suppress_handler = \
        generic_package_suppress_handler.GenericSuppressHandler()
    suppress_handler.suppress_file = args.suppress
    LOG.debug('Using suppress file: ' + str(suppress_handler.suppress_file))

    docs_dir = os.path.join(context.doc_root, 'checker_md_docs')
    docs_map_path = os.path.join(docs_dir, 'checker_doc_map.json')
    with open(docs_map_path, 'r') as map_file:
        docs_map = json.load(map_file)

    package_data = {
        'www_root': context.www_root,
        'doc_root': context.doc_root,
        'checker_md_docs': docs_dir,
        'checker_md_docs_map': docs_map
    }

    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)
def handle_check(args):
    """
    Check mode.

    Prepares the workspace and the analysis environment, obtains a
    compilation log (via _check_generate_log_file), parses it into build
    actions, starts the database and the report-storage server, registers
    the run, and hands the actions to the analyzer workers. Intermediate
    plist files are removed afterwards unless args.keep_tmp is set.
    """
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    args.workspace = os.path.realpath(args.workspace)
    if not os.path.isdir(args.workspace):
        os.mkdir(args.workspace)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    # The configured clang must be usable before anything is started.
    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # load severity map from config file
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    log_file = _check_generate_log_file(args, context)
    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    # The report-storage server is bound to a free local port.
    conn_mgr = client.ConnectionManager(sql_server,
                                        'localhost',
                                        util.get_free_port())

    if args.jobs <= 0:
        args.jobs = 1

    package_version = context.version['major'] + '.' + \
        context.version['minor']

    # Default suppress file is workspace/<package_version> unless an
    # explicit path was given.
    suppress_file = os.path.join(args.workspace, package_version) \
        if not args.suppress \
        else os.path.realpath(args.suppress)

    send_suppress = False
    if os.path.exists(suppress_file):
        send_suppress = True

    sql_server.start(wait_for_start=True, init=True)
    conn_mgr.start_report_server(context.db_version_info)

    LOG.debug("Checker server started.")

    with client.get_connection() as connection:
        # Register (or update) the run; a duplicate run name is reported
        # to the user, any other request failure is re-raised.
        try:
            context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                        args.name,
                                                        package_version,
                                                        args.update)
        except shared.ttypes.RequestFailed as thrift_ex:
            if 'violates unique constraint "uq_runs_name"' \
                    not in thrift_ex.message:
                # not the unique name was the problem
                raise
            else:
                LOG.info("Name was already used in the database please "
                         "choose another unique name for checking.")
                sys.exit(1)

        if args.update:
            # clean previous suppress information
            client.clean_suppress(connection, context.run_id)

        if send_suppress:
            client.send_suppress(connection, suppress_file)

        report_output = os.path.join(context.codechecker_workspace,
                                     context.report_output_dir_name)
        if not os.path.exists(report_output):
            os.mkdir(report_output)

        static_analyzer = analyzer.StaticAnalyzer(context)
        static_analyzer.workspace = report_output

        # first add checkers from config file
        static_analyzer.checkers = context.default_checkers

        # add user defined checkers
        try:
            static_analyzer.checkers = args.ordered_checker_args
        except AttributeError:
            LOG.debug('No checkers were defined in the command line')

        if args.configfile:
            static_analyzer.add_config(connection, args.configfile)
        # else:
        # add default config from package
        # static_analyzer.add_config(connection,
        #                            context.checkers_config_file)

        if args.skipfile:
            static_analyzer.add_skip(connection,
                                     os.path.realpath(args.skipfile))

    LOG.info("Static analysis is starting..")
    start_time = time.time()

    LOG.debug("Starting workers...")
    start_workers(static_analyzer, actions, args.jobs, context)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run()

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")

    # Remove intermediate analyzer output unless asked to keep it.
    if not args.keep_tmp:
        LOG.debug("Removing plist files in " +
                  context.codechecker_workspace)
        plist_files = glob.glob(os.path.join(report_output, '*.plist'))
        for pf in plist_files:
            os.remove(pf)

    LOG.info("Analysis has finished.")
def handle_server(args):
    """Start the report viewer server."""
    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    check_options_validity(args)

    if args.suppress is None:
        LOG.warning('WARNING! No suppress file was given, suppressed results will be only stored in the database.')
    elif not os.path.exists(args.suppress):
        LOG.error('Suppress file ' + args.suppress + ' not found!')
        sys.exit(1)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)

    conn_mgr = client.ConnectionManager(sql_server,
                                        args.check_address,
                                        args.check_port)

    # The database is started in both modes; the report-storage server
    # only when a check port was supplied.
    if args.check_port:
        LOG.debug('Starting codechecker server and database server.')
    else:
        LOG.debug('Starting database.')
    sql_server.start(wait_for_start=True, init=True)
    if args.check_port:
        conn_mgr.start_report_server(context.db_version_info)

    # start database viewer
    db_connection_string = sql_server.get_connection_string()

    suppress_handler = \
        generic_package_suppress_handler.GenericSuppressHandler()
    suppress_handler.suppress_file = args.suppress
    LOG.debug('Using suppress file: ' + str(suppress_handler.suppress_file))

    docs_dir = os.path.join(context.doc_root, 'checker_md_docs')
    docs_map_path = os.path.join(docs_dir, 'checker_doc_map.json')
    with open(docs_map_path, 'r') as map_file:
        docs_map = json.load(map_file)

    package_data = {
        'www_root': context.www_root,
        'doc_root': context.doc_root,
        'checker_md_docs': docs_dir,
        'checker_md_docs_map': docs_map
    }

    client_db_access_server.start_server(package_data,
                                         args.view_port,
                                         db_connection_string,
                                         suppress_handler,
                                         args.not_host_only,
                                         context.db_version_info)