def run(analyzer, action):
    ''' This function implements the "check" feature. '''
    with client.get_connection() as connection:
        def on_result(**args):
            check_command = args['check_command']
            action_id = connection.add_build_action(action.original_command,
                                                    check_command)
            err_code = args['err_code']
            if err_code == 0:
                client.send_plist_content(connection,
                                          args['report_plist'],
                                          action_id,
                                          analyzer.run_id,
                                          analyzer.severity_map,
                                          analyzer.should_skip)
                msg = 'Checking %s is done.' % (args['source_file'])
                LOG.debug(msg + '\n' + check_command)
                LOG.info(msg)

            connection.finish_build_action(action_id, args['err_message'])
            return err_code

        return _run_action(analyzer, action, on_result)
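# _run_action is not shown in this snippet, so the exact driver is unknown.
# The hypothetical call below only illustrates the keyword contract that
# on_result above actually reads; every value is made up for illustration.
def _example_on_result_call(on_result):
    return on_result(check_command='clang --analyze main.c',
                     err_code=0,
                     err_message='',
                     report_plist='main_c_1.plist',
                     source_file='main.c')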
def handle_results(self):
    """
    Send the plist content to the database.
    Server API calls should be used in one connection:
     - addBuildAction
     - addReport
     - needFileContent
     - addFileContent
     - finishBuildAction
    """
    with client.get_connection() as connection:
        LOG.debug('Storing original build and analyzer command '
                  'to the database.')

        _, source_file_name = ntpath.split(self.analyzed_source_file)

        # Store the build action and the analyzer command to the database.
        analysis_id = connection.add_build_action(
            self.__run_id,
            self.buildaction.original_command,
            ' '.join(self.analyzer_cmd),
            self.buildaction.analyzer_type,
            source_file_name)

        if self.analyzer_returncode == 0:
            LOG.info(self.buildaction.analyzer_type + ' analyzed ' +
                     source_file_name + ' successfully.')

            plist_file = self.get_analyzer_result_file()
            try:
                files, bugs = plist_parser.parse_plist(plist_file)
            except Exception as ex:
                LOG.debug(str(ex))
                msg = 'Parsing the generated result file failed.'
                LOG.error(msg + ' ' + plist_file)
                connection.finish_build_action(analysis_id, msg)
                return 1

            self.__store_bugs(files, bugs, connection, analysis_id)
        else:
            LOG.info('Analysing ' + source_file_name + ' with ' +
                     self.buildaction.analyzer_type + ' failed.')

        connection.finish_build_action(analysis_id, self.analyzer_stderr)
def handle_plist(args):
    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    if not args.stdout:
        args.workspace = os.path.realpath(args.workspace)
        if not os.path.isdir(args.workspace):
            os.mkdir(args.workspace)

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    conn_mgr = client.ConnectionManager(sql_server,
                                        'localhost',
                                        util.get_free_port())

    sql_server.start(context.db_version_info,
                     wait_for_start=True,
                     init=True)
    conn_mgr.start_report_server()

    with client.get_connection() as connection:
        context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                    args.name,
                                                    context.version,
                                                    args.force)

    pool = multiprocessing.Pool(args.jobs)
    try:
        items = [(plist, args, context)
                 for plist in os.listdir(args.directory)]
        pool.map_async(consume_plist, items, 1).get(float('inf'))
        pool.close()
    except Exception:
        pool.terminate()
        raise
    finally:
        pool.join()

    if not args.stdout:
        log_startserver_hint(args)
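# consume_plist is referenced above but not defined in this snippet. Judging
# from the map_async() call, each worker receives one (plist, args, context)
# tuple. A hypothetical, minimal worker shape (LOG and os come from the
# surrounding module; the real implementation is not shown here):
def consume_plist(item):
    plist, args, context = item
    LOG.info('Processing plist file: ' + os.path.join(args.directory, plist))
    # The real worker would parse the plist (e.g. with plist_parser) and
    # store its reports through the report server started above.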
def handle_results(self):
    """
    Send the plist content to the database.
    Server API calls should be used in one connection.
     - addBuildAction
     - addReport
     - needFileContent
     - addFileContent
     - finishBuildAction
    """
    with client.get_connection() as connection:
        LOG.debug('Storing original build and analyzer command '
                  'to the database.')

        _, source_file_name = ntpath.split(self.analyzed_source_file)

        if LoggerFactory.get_log_level() == logger.DEBUG:
            analyzer_cmd = ' '.join(self.analyzer_cmd)
        else:
            analyzer_cmd = ''

        build_cmd_hash = self.buildaction.original_command_hash
        analysis_id = \
            connection.add_build_action(self.__run_id,
                                        build_cmd_hash,
                                        analyzer_cmd,
                                        self.buildaction.analyzer_type,
                                        source_file_name)

        assert self.analyzer_returncode == 0

        plist_file = self.analyzer_result_file
        try:
            files, bugs = plist_parser.parse_plist(plist_file)
        except Exception as ex:
            LOG.debug(str(ex))
            msg = 'Parsing the generated result file failed.'
            LOG.error(msg + ' ' + plist_file)
            connection.finish_build_action(analysis_id, msg)
            return 1

        self.__store_bugs(files, bugs, connection, analysis_id)

        connection.finish_build_action(analysis_id, self.analyzer_stderr)
def handle_plist(self, plist):
    with client.get_connection() as connection:
        # TODO: When the analyzer name can be read from PList, then it
        # should be passed too.
        # TODO: File name should be read from the PList and passed.
        analysis_id = connection.add_build_action(self.__run_id,
                                                  plist,
                                                  'Build action from plist',
                                                  '',
                                                  '')

        try:
            files, bugs = plist_parser.parse_plist(plist)
        except Exception as ex:
            msg = 'Parsing the generated result file failed'
            LOG.error(msg + ' ' + plist)
            LOG.error(str(ex))
            connection.finish_build_action(analysis_id, msg)
            return 1

        self.__store_bugs(files, bugs, connection, analysis_id)

        connection.finish_build_action(analysis_id, self.analyzer_stderr)
def handle_plist(self, plist):
    with client.get_connection() as connection:
        # TODO: When the analyzer name can be read from PList, then it
        # should be passed too.
        # TODO: File name should be read from the PList and passed.
        analysis_id = connection.add_build_action(self.__run_id,
                                                  plist,
                                                  'Build action from plist',
                                                  '',
                                                  '')

        try:
            files, bugs = plist_parser.parse_plist(plist)
        except Exception:
            msg = 'Parsing the generated result file failed'
            LOG.error(msg + ' ' + plist)
            connection.finish_build_action(analysis_id, msg)
            return 1

        self.__store_bugs(files, bugs, connection, analysis_id)

        connection.finish_build_action(analysis_id, self.analyzer_stderr)
def run_check(args, actions, context):
    """
    Prepare:
     - analyzer config handlers
     - skiplist handling
     - analyzer severity levels

    Stores analysis related data to the database and starts the analysis.
    """
    if args.jobs <= 0:
        args.jobs = 1

    LOG.debug_analyzer("Checking supported analyzers.")
    enabled_analyzers = analyzer_types.check_supported_analyzers(
        args.analyzers, context)

    actions = prepare_actions(actions, enabled_analyzers)

    suppress_file = ''
    try:
        suppress_file = os.path.realpath(args.suppress)
    except AttributeError:
        LOG.debug_analyzer('Suppress file was not set in the command line.')

    # Create one skip list handler shared between
    # the analysis manager workers.
    skip_handler = _get_skip_handler(args)

    with client.get_connection() as connection:
        context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                    args.name,
                                                    context.version,
                                                    args.force)

        # Clean previous suppress information.
        client.clean_suppress(connection, context.run_id)

        if os.path.exists(suppress_file):
            client.send_suppress(context.run_id, connection, suppress_file)

        analyzer_config_map = analyzer_types.build_config_handlers(
            args, context, enabled_analyzers, connection)

        if skip_handler:
            connection.add_skip_paths(context.run_id,
                                      skip_handler.get_skiplist())

    __print_analyzer_version(context, analyzer_config_map)

    LOG.info("Static analysis is starting ...")
    start_time = time.time()

    analysis_manager.start_workers(args, actions, context,
                                   analyzer_config_map, skip_handler)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run(context.run_id)

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")
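# _get_skip_handler is called above but not defined in this snippet. A
# minimal sketch, assuming it only factors out the skiplist creation that
# the older run_check variant below still does inline:
def _get_skip_handler(args):
    try:
        if args.skipfile:
            LOG.debug_analyzer("Creating skiplist handler.")
            return skiplist_handler.SkipListHandler(args.skipfile)
    except AttributeError:
        LOG.debug_analyzer('Skip file was not set in the command line.')
    return None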
def run(analyzer, action):

    def signal_handler(*args, **kwargs):
        # Clang does not kill its child processes, so I have to.
        try:
            g_pid = result.pid
            os.killpg(g_pid, signal.SIGTERM)
        finally:
            sys.exit(os.EX_OK)

    signal.signal(signal.SIGINT, signal_handler)
    current_cmd = list(analyzer.cmd)

    for checker_name, enabled in analyzer.checkers:
        if enabled:
            current_cmd.append('-Xclang')
            current_cmd.append('-analyzer-checker=' + checker_name)
        else:
            current_cmd.append('-Xclang')
            current_cmd.append('-analyzer-disable-checker')
            current_cmd.append('-Xclang')
            current_cmd.append(checker_name)

    current_cmd.extend(action.analyzer_options)

    # Add checker config.
    if analyzer.config and analyzer.has_plugin:
        current_cmd.append('-Xclang')
        current_cmd.append('-plugin-arg-checkercfg')
        current_cmd.append('-Xclang')
        current_cmd.append(analyzer.config)

    # Set lang.
    current_cmd.append('-x')
    current_cmd.append(action.lang)

    for source in action.sources:
        if analyzer.should_skip(source):
            LOG.debug(source + ' is skipped.')
            continue

        source_name = source[source.rfind('/') + 1:].replace('.', '_')
        if not os.path.exists(analyzer.workspace):
            os.mkdir(analyzer.workspace)
        report_plist = os.path.join(analyzer.workspace,
                                    source_name + '_' +
                                    str(action.id) + '.plist')

        extender = list()
        extender.append('-o')
        extender.append(report_plist)
        extender.append(source)

        check_cmd = current_cmd + extender
        check_cmd_str = ' '.join(check_cmd)

        with client.get_connection() as connection:
            action_id = connection.add_build_action(action.original_command,
                                                    check_cmd_str,
                                                    action.target)

            LOG.debug(' '.join(check_cmd))
            result = subprocess.Popen(check_cmd,
                                      bufsize=-1,
                                      env=analyzer.env,
                                      preexec_fn=os.setsid,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
            (stdout, stderr) = result.communicate()

            failure = ''
            source_path, source_file = ntpath.split(source)
            if result.returncode != 0:
                failure = stdout + '\n' + stderr
                msg = 'Checking %s has failed.' % (source_file)
                LOG.debug(msg + '\n' + check_cmd_str + '\n' + failure)
                LOG.info(msg)
            else:
                client.send_plist_content(connection,
                                          report_plist,
                                          action_id,
                                          analyzer.run_id,
                                          analyzer.severity_map,
                                          analyzer.should_skip,
                                          analyzer.module_id)
                msg = 'Checking %s is done.' % (source_file)
                LOG.debug(msg + '\n' + check_cmd_str)
                LOG.info(msg)

            LOG.debug(stdout)
            LOG.debug(stderr)
            connection.finish_build_action(action_id, failure)

    return result.returncode
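# Why preexec_fn=os.setsid is used above: it starts the analyzer in its own
# process group, so os.killpg() in signal_handler can terminate Clang along
# with any child processes it forked. A standalone, runnable illustration
# (not part of the original code):
import os
import signal
import subprocess

proc = subprocess.Popen(['sleep', '100'], preexec_fn=os.setsid)
os.killpg(proc.pid, signal.SIGTERM)  # Signals the whole process group.
proc.wait()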
def run_check(args, actions, context):
    """
    Prepare:
     - analyzer config handlers
     - skiplist handling
     - analyzer severity levels

    Stores analysis related data to the database and starts the analysis.
    """
    if args.jobs <= 0:
        args.jobs = 1

    LOG.debug_analyzer("Checking supported analyzers.")
    enabled_analyzers = analyzer_types.check_supported_analyzers(
        args.analyzers, context)

    # Load severity map from the config file.
    LOG.debug_analyzer("Loading checker severity map.")
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    actions = prepare_actions(actions, enabled_analyzers)

    analyzer_config_map = {}

    package_version = context.version['major'] + '.' + \
        context.version['minor']

    suppress_file = ''
    try:
        suppress_file = os.path.realpath(args.suppress)
    except AttributeError:
        LOG.debug_analyzer('Suppress file was not set in the command line.')

    # Create one skip list handler shared between
    # the analysis manager workers.
    skip_handler = None
    try:
        if args.skipfile:
            LOG.debug_analyzer("Creating skiplist handler.")
            skip_handler = skiplist_handler.SkipListHandler(args.skipfile)
    except AttributeError:
        LOG.debug_analyzer('Skip file was not set in the command line.')

    with client.get_connection() as connection:
        context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                    args.name,
                                                    package_version,
                                                    args.force)

        # Clean previous suppress information.
        client.clean_suppress(connection, context.run_id)

        if os.path.exists(suppress_file):
            client.send_suppress(context.run_id, connection, suppress_file)

        analyzer_config_map = analyzer_types.build_config_handlers(
            args, context, enabled_analyzers, connection)

        if skip_handler:
            connection.add_skip_paths(context.run_id,
                                      skip_handler.get_skiplist())

    LOG.info("Static analysis is starting ...")
    start_time = time.time()

    analysis_manager.start_workers(args, actions, context,
                                   analyzer_config_map, skip_handler)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run(context.run_id)

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")
def handle_results(self):
    """
    Send the plist content to the database.
    Server API calls should be used in one connection:
     - addBuildAction
     - addReport
     - needFileContent
     - addFileContent
     - finishBuildAction
    """
    with client.get_connection() as connection:
        LOG.debug('Storing original build and analyzer command '
                  'to the database.')

        # Store the build action and the analyzer command to the database.
        analysis_id = connection.add_build_action(
            self.__run_id,
            self.buildaction.original_command,
            ' '.join(self.analyzer_cmd))

        _, source_file_name = ntpath.split(self.analyzed_source_file)

        if self.analyzer_returncode == 0:
            LOG.info(self.buildaction.analyzer_type + ' analyzed ' +
                     source_file_name + ' successfully.')

            plist_file = self.get_analyzer_result_file()
            try:
                files, bugs = plist_parser.parse_plist(plist_file)
            except Exception as ex:
                LOG.debug(str(ex))
                msg = 'Parsing the generated result file failed.'
                LOG.error(msg + ' ' + plist_file)
                connection.finish_build_action(analysis_id, msg)
                return 1

            file_ids = {}
            # Send the content of each file to the server if needed.
            for file_name in files:
                file_descriptor = connection.need_file_content(self.__run_id,
                                                               file_name)
                file_ids[file_name] = file_descriptor.fileId
                if file_descriptor.needed:
                    with open(file_name, 'r') as source_file:
                        file_content = source_file.read()
                        compressed_file = zlib.compress(
                            file_content, zlib.Z_BEST_COMPRESSION)
                    # TODO: we may not use the file content in the end
                    # depending on skippaths.
                    LOG.debug('Storing file content to the database.')
                    connection.add_file_content(file_descriptor.fileId,
                                                compressed_file)

            # Skipping bugs in header files is handled here.
            report_ids = []
            for bug in bugs:
                events = bug.events()
                # The skip list handler can be None if no config file is set.
                if self.skiplist_handler:
                    if events and self.skiplist_handler.should_skip(
                            events[-1].start_pos.file_path):
                        # Issue #20: this bug is in a file which should
                        # be skipped.
                        LOG.debug(bug.hash_value + ' is skipped (in ' +
                                  events[-1].start_pos.file_path + ')')
                        continue

                # Create remaining data for bugs and send them to the server.
                bug_paths = []
                for path in bug.paths():
                    bug_paths.append(shared.ttypes.BugPathPos(
                        path.start_pos.line,
                        path.start_pos.col,
                        path.end_pos.line,
                        path.end_pos.col,
                        file_ids[path.start_pos.file_path]))

                bug_events = []
                for event in bug.events():
                    bug_events.append(shared.ttypes.BugPathEvent(
                        event.start_pos.line,
                        event.start_pos.col,
                        event.end_pos.line,
                        event.end_pos.col,
                        event.msg,
                        file_ids[event.start_pos.file_path]))

                bug_hash = bug.hash_value

                severity_name = self.severity_map.get(bug.checker_name,
                                                      'UNSPECIFIED')
                severity = \
                    shared.ttypes.Severity._NAMES_TO_VALUES[severity_name]

                suppress = False

                source_file = bug.file_path
                last_bug_event = bug.events()[-1]
                bug_line = last_bug_event.start_pos.line

                sp_handler = suppress_handler.SourceSuppressHandler(
                    source_file, bug_line)

                # Check for a suppress comment.
                supp = sp_handler.check_source_suppress()
                if supp:
                    # Something should be suppressed.
                    suppress_checkers = sp_handler.suppressed_checkers()

                    if bug.checker_name in suppress_checkers or \
                            suppress_checkers == ['all']:
                        suppress = True

                    file_path, file_name = ntpath.split(source_file)

                    # (checker_hash, file_name, comment)
                    to_suppress = (bug_hash,
                                   file_name,
                                   sp_handler.suppress_comment())

                    LOG.debug(to_suppress)

                    connection.add_suppress_bug(self.__run_id, [to_suppress])

                LOG.debug('Storing check results to the database.')
                report_id = connection.add_report(analysis_id,
                                                  file_ids[bug.file_path],
                                                  bug_hash,
                                                  bug.msg,
                                                  bug_paths,
                                                  bug_events,
                                                  bug.checker_name,
                                                  bug.category,
                                                  bug.type,
                                                  severity,
                                                  suppress)
                report_ids.append(report_id)
        else:
            LOG.info('Analysing ' + source_file_name + ' with ' +
                     self.buildaction.analyzer_type + ' failed.')

        connection.finish_build_action(analysis_id, self.analyzer_stderr)
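# handle_results above stores file contents zlib-compressed with
# Z_BEST_COMPRESSION. A self-contained illustration of the same round trip
# (the server side would decompress accordingly; not part of the original):
import zlib

original = b'int main() { return 0; }'
compressed = zlib.compress(original, zlib.Z_BEST_COMPRESSION)
assert zlib.decompress(compressed) == original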
def handle_check(args):
    """ Check mode. """

    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    args.workspace = os.path.realpath(args.workspace)
    if not os.path.isdir(args.workspace):
        os.mkdir(args.workspace)

    log_file = ""
    if args.logfile:
        log_file = os.path.realpath(args.logfile)
        if not os.path.exists(args.logfile):
            LOG.info("Log file does not exist.")
            return

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # Load severity map from the config file.
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    if args.command:
        # Check if the logger binary exists.
        if not os.path.isfile(context.path_logger_bin):
            LOG.debug('Logger binary not found! Required for logging.')
            sys.exit(1)

        # Check if the logger library exists.
        if not os.path.exists(context.path_logger_lib):
            LOG.debug('Logger library directory not found! '
                      'Libs are required for logging.')
            sys.exit(1)

        log_file = os.path.join(context.codechecker_workspace,
                                context.build_log_file_name)
        if os.path.exists(log_file):
            os.remove(log_file)
        open(log_file, 'a').close()  # Same as the touch command on Linux.

        perform_build_command(log_file, args.command, context)

    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    setup_connection_manager_db(args)
    client.ConnectionManager.port = util.get_free_port()

    if args.jobs <= 0:
        args.jobs = 1

    package_version = context.version['major'] + '.' + \
        context.version['minor']

    suppress_file = os.path.join(args.workspace, package_version) \
        if not args.suppress \
        else os.path.realpath(args.suppress)

    send_suppress = False
    if os.path.exists(suppress_file):
        send_suppress = True

    client.ConnectionManager.run_env = check_env
    client.ConnectionManager.start_server(args.dbname, context)
    LOG.debug("Checker server started.")

    with client.get_connection() as connection:
        try:
            context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                        args.name,
                                                        package_version,
                                                        args.update)
        except shared.ttypes.RequestFailed as thrift_ex:
            if 'violates unique constraint "runs_name_key"' not in \
                    thrift_ex.message:
                # The problem was not the non-unique run name.
                raise
            else:
                LOG.info("Name was already used in the database, please "
                         "choose another unique name for checking.")
                sys.exit(1)

        if send_suppress:
            client.send_suppress(connection, suppress_file)

        # static_analyzer.clean = args.clean
        if args.clean:
            # Cleaning up previous results.
            LOG.debug("Cleaning previous plist files in " +
                      context.codechecker_workspace)
            plist_files = glob.glob(
                os.path.join(context.codechecker_workspace, '*.plist'))
            for pf in plist_files:
                os.remove(pf)

        report_output = os.path.join(context.codechecker_workspace,
                                     context.report_output_dir_name)
        if not os.path.exists(report_output):
            os.mkdir(report_output)

        static_analyzer = analyzer.StaticAnalyzer(context)
        static_analyzer.workspace = report_output

        # First add checkers from the config file.
        static_analyzer.checkers = context.default_checkers

        # Add user-defined checkers.
        try:
            static_analyzer.checkers = args.ordered_checker_args
        except AttributeError:
            LOG.debug('No checkers were defined in the command line.')

        if args.configfile:
            static_analyzer.add_config(connection, args.configfile)
        # else:
        #     # Add the default config from the package.
        #     static_analyzer.add_config(connection,
        #                                context.checkers_config_file)

        if args.skipfile:
            static_analyzer.add_skip(connection,
                                     os.path.realpath(args.skipfile))

    LOG.info("Static analysis is starting...")
    start_time = time.time()

    LOG.debug("Starting workers...")
    start_workers(static_analyzer, actions, args.jobs, context)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run()

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")
    LOG.info("Analysis has finished.")
def handle_check(args):
    """ Check mode. """

    if not host_check.check_zlib():
        LOG.error("zlib error")
        sys.exit(1)

    args.workspace = os.path.realpath(args.workspace)
    if not os.path.isdir(args.workspace):
        os.mkdir(args.workspace)

    context = generic_package_context.get_context()
    context.codechecker_workspace = args.workspace
    context.db_username = args.dbusername

    check_env = analyzer_env.get_check_env(context.path_env_extra,
                                           context.ld_lib_path_extra)

    compiler_bin = context.compiler_bin
    if not host_check.check_clang(compiler_bin, check_env):
        sys.exit(1)

    # Load severity map from the config file.
    if os.path.exists(context.checkers_severity_map_file):
        with open(context.checkers_severity_map_file, 'r') as sev_conf_file:
            severity_config = sev_conf_file.read()
            context.severity_map = json.loads(severity_config)

    log_file = _check_generate_log_file(args, context)
    try:
        actions = log_parser.parse_log(log_file)
    except Exception as ex:
        LOG.error(ex)
        sys.exit(1)

    if not actions:
        LOG.warning('There are no build actions in the log file.')
        sys.exit(1)

    sql_server = SQLServer.from_cmdline_args(args,
                                             context.codechecker_workspace,
                                             context.migration_root,
                                             check_env)
    conn_mgr = client.ConnectionManager(sql_server,
                                        'localhost',
                                        util.get_free_port())

    if args.jobs <= 0:
        args.jobs = 1

    package_version = context.version['major'] + '.' + \
        context.version['minor']

    suppress_file = os.path.join(args.workspace, package_version) \
        if not args.suppress \
        else os.path.realpath(args.suppress)

    send_suppress = False
    if os.path.exists(suppress_file):
        send_suppress = True

    sql_server.start(wait_for_start=True, init=True)
    conn_mgr.start_report_server(context.db_version_info)
    LOG.debug("Checker server started.")

    with client.get_connection() as connection:
        try:
            context.run_id = connection.add_checker_run(' '.join(sys.argv),
                                                        args.name,
                                                        package_version,
                                                        args.update)
        except shared.ttypes.RequestFailed as thrift_ex:
            if 'violates unique constraint "uq_runs_name"' not in \
                    thrift_ex.message:
                # The problem was not the non-unique run name.
                raise
            else:
                LOG.info("Name was already used in the database, please "
                         "choose another unique name for checking.")
                sys.exit(1)

        if args.update:
            # Clean previous suppress information.
            client.clean_suppress(connection, context.run_id)

        if send_suppress:
            client.send_suppress(connection, suppress_file)

        report_output = os.path.join(context.codechecker_workspace,
                                     context.report_output_dir_name)
        if not os.path.exists(report_output):
            os.mkdir(report_output)

        static_analyzer = analyzer.StaticAnalyzer(context)
        static_analyzer.workspace = report_output

        # First add checkers from the config file.
        static_analyzer.checkers = context.default_checkers

        # Add user-defined checkers.
        try:
            static_analyzer.checkers = args.ordered_checker_args
        except AttributeError:
            LOG.debug('No checkers were defined in the command line.')

        if args.configfile:
            static_analyzer.add_config(connection, args.configfile)
        # else:
        #     # Add the default config from the package.
        #     static_analyzer.add_config(connection,
        #                                context.checkers_config_file)

        if args.skipfile:
            static_analyzer.add_skip(connection,
                                     os.path.realpath(args.skipfile))

    LOG.info("Static analysis is starting...")
    start_time = time.time()

    LOG.debug("Starting workers...")
    start_workers(static_analyzer, actions, args.jobs, context)

    end_time = time.time()

    with client.get_connection() as connection:
        connection.finish_checker_run()

    LOG.info("Analysis length: " + str(end_time - start_time) + " sec.")

    if not args.keep_tmp:
        LOG.debug("Removing plist files in " + context.codechecker_workspace)
        plist_files = glob.glob(os.path.join(report_output, '*.plist'))
        for pf in plist_files:
            os.remove(pf)

    LOG.info("Analysis has finished.")
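# _check_generate_log_file is called in handle_check above but not defined
# in this snippet. A minimal sketch, assuming it factors out the log-file
# handling that the older handle_check variant earlier in this file still
# does inline (returning None if the given log file is missing):
def _check_generate_log_file(args, context):
    log_file = None
    if args.logfile:
        log_file = os.path.realpath(args.logfile)
        if not os.path.exists(args.logfile):
            LOG.info("Log file does not exist.")
            return None
    if args.command:
        log_file = os.path.join(context.codechecker_workspace,
                                context.build_log_file_name)
        if os.path.exists(log_file):
            os.remove(log_file)
        open(log_file, 'a').close()  # Same as the touch command on Linux.
        perform_build_command(log_file, args.command, context)
    return log_file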