def execute_task(fuzzer_name_and_revision, job_type):
  """Execute corpus pruning task.

  Args:
    fuzzer_name_and_revision: Fully qualified fuzzer name, optionally suffixed
        with '@<revision>' (legacy queue format).
    job_type: Job type name, used to scope the task status entry.

  Raises:
    CorpusPruningException: If the fuzzer and data bundles could not be set up.
  """
  # TODO(ochang): Remove this once remaining jobs in queue are all processed.
  if '@' in fuzzer_name_and_revision:
    full_fuzzer_name, revision = fuzzer_name_and_revision.split('@')
    revision = revisions.convert_revision_to_integer(revision)
  else:
    full_fuzzer_name = fuzzer_name_and_revision
    revision = 0

  fuzz_target = data_handler.get_fuzz_target(full_fuzzer_name)
  task_name = 'corpus_pruning_%s_%s' % (full_fuzzer_name, job_type)

  # Get status of last execution.
  last_execution_metadata = data_handler.get_task_status(task_name)
  last_execution_failed = (
      last_execution_metadata and
      last_execution_metadata.status == data_types.TaskState.ERROR)

  # Make sure we're the only instance running for the given fuzzer and
  # job_type.
  if not data_handler.update_task_status(task_name,
                                         data_types.TaskState.STARTED):
    logs.log('A previous corpus pruning task is still running, exiting.')
    return

  # Setup fuzzer and data bundle.
  if not setup.update_fuzzer_and_data_bundles(fuzz_target.engine):
    # Record the failure first; raising while the status is still STARTED
    # would make subsequent runs believe an instance is already running.
    data_handler.update_task_status(task_name, data_types.TaskState.ERROR)
    raise CorpusPruningException(
        'Failed to set up fuzzer %s.' % fuzz_target.engine)

  use_minijail = environment.get_value('USE_MINIJAIL')

  # TODO(unassigned): Use coverage information for better selection here.
  cross_pollinate_fuzzers = _get_cross_pollinate_fuzzers(
      fuzz_target.engine, full_fuzzer_name)

  context = Context(fuzz_target, cross_pollinate_fuzzers, use_minijail)

  # Copy global blacklist into local suppressions file if LSan is enabled.
  is_lsan_enabled = environment.get_value('LSAN')
  if is_lsan_enabled:
    # TODO(ochang): Copy this to untrusted worker.
    leak_blacklist.copy_global_to_local_blacklist()

  try:
    result = do_corpus_pruning(context, last_execution_failed, revision)
    _save_coverage_information(context, result)
    _process_corpus_crashes(context, result)
  except CorpusPruningException as e:
    logs.log_error('Corpus pruning failed: %s.' % str(e))
    data_handler.update_task_status(task_name, data_types.TaskState.ERROR)
    return
  finally:
    # Always release per-run resources, whether pruning succeeded or not.
    context.cleanup()

  data_handler.update_task_status(task_name, data_types.TaskState.FINISHED)
def execute_task(full_fuzzer_name, job_type):
  """Execute corpus pruning task.

  Args:
    full_fuzzer_name: Fully qualified fuzzer name.
    job_type: Job type name, used to scope the task status entry.

  Raises:
    CorpusPruningException: If the fuzzer and data bundles could not be set up.
  """
  fuzz_target = data_handler.get_fuzz_target(full_fuzzer_name)
  task_name = 'corpus_pruning_%s_%s' % (full_fuzzer_name, job_type)
  revision = 0  # Trunk revision

  # Get status of last execution.
  last_execution_metadata = data_handler.get_task_status(task_name)
  last_execution_failed = (
      last_execution_metadata and
      last_execution_metadata.status == data_types.TaskState.ERROR)

  # Make sure we're the only instance running for the given fuzzer and
  # job_type.
  if not data_handler.update_task_status(task_name,
                                         data_types.TaskState.STARTED):
    logs.log('A previous corpus pruning task is still running, exiting.')
    return

  # Setup fuzzer and data bundle.
  if not setup.update_fuzzer_and_data_bundles(fuzz_target.engine):
    # Record the failure first; raising while the status is still STARTED
    # would make subsequent runs believe an instance is already running.
    data_handler.update_task_status(task_name, data_types.TaskState.ERROR)
    raise CorpusPruningException(
        'Failed to set up fuzzer %s.' % fuzz_target.engine)

  cross_pollination_method, tag = choose_cross_pollination_strategy(
      full_fuzzer_name)

  # TODO(unassigned): Use coverage information for better selection here.
  cross_pollinate_fuzzers = _get_cross_pollinate_fuzzers(
      fuzz_target.engine, full_fuzzer_name, cross_pollination_method, tag)

  context = Context(fuzz_target, cross_pollinate_fuzzers,
                    cross_pollination_method, tag)

  # Copy global blacklist into local suppressions file if LSan is enabled.
  is_lsan_enabled = environment.get_value('LSAN')
  if is_lsan_enabled:
    # TODO(ochang): Copy this to untrusted worker.
    leak_blacklist.copy_global_to_local_blacklist()

  try:
    result = do_corpus_pruning(context, last_execution_failed, revision)
    _record_cross_pollination_stats(result.cross_pollination_stats)
    _save_coverage_information(context, result)
    _process_corpus_crashes(context, result)
  except Exception:
    # Broad catch is deliberate: any failure must flip the status to ERROR
    # so the next run knows this execution did not complete.
    logs.log_error('Corpus pruning failed.')
    data_handler.update_task_status(task_name, data_types.TaskState.ERROR)
    return
  finally:
    # Always release per-run resources, whether pruning succeeded or not.
    context.cleanup()

  data_handler.update_task_status(task_name, data_types.TaskState.FINISHED)