def process_testcase(engine_name, tool_name, target_name, arguments,
                     testcase_path, output_path, timeout):
  """Process a testcase (minimize or cleanse) on an untrusted worker.

  Args:
    engine_name: Name of the fuzzing engine.
    tool_name: 'minimize' selects the MINIMIZE operation; anything else
        selects CLEANSE.
    target_name: Name of the fuzz target.
    arguments: Additional arguments for the tool.
    testcase_path: Host path of the testcase to process.
    output_path: Host path where the processed testcase is written.
    timeout: Timeout (in seconds) for the operation.

  Returns:
    An engine.ReproduceResult with the worker's command, return code,
    execution time and output.
  """
  if tool_name == 'minimize':
    operation = untrusted_runner_pb2.ProcessTestcaseRequest.MINIMIZE
  else:
    operation = untrusted_runner_pb2.ProcessTestcaseRequest.CLEANSE

  # Rebase each host path to the worker root exactly once and reuse the
  # results (the original recomputed both rebased paths redundantly).
  rebased_testcase_path = file_host.rebase_to_worker_root(testcase_path)
  rebased_output_path = file_host.rebase_to_worker_root(output_path)
  file_host.copy_file_to_worker(testcase_path, rebased_testcase_path)

  request = untrusted_runner_pb2.ProcessTestcaseRequest(
      engine=engine_name,
      operation=operation,
      target_name=target_name,
      arguments=arguments,
      testcase_path=rebased_testcase_path,
      output_path=rebased_output_path,
      timeout=timeout)

  response = host.stub().ProcessTestcase(request)

  # Pull the processed testcase back from the worker to the host path.
  file_host.copy_file_from_worker(rebased_output_path, output_path)

  return engine.ReproduceResult(list(response.command), response.return_code,
                                response.time_executed, response.output)
def engine_reproduce(engine_impl, target_name, testcase_path, arguments,
                     timeout):
  """Run engine reproduce on an untrusted worker.

  Args:
    engine_impl: Fuzzing engine implementation (provides .name).
    target_name: Name of the fuzz target.
    testcase_path: Host path of the testcase to reproduce.
    arguments: Additional engine arguments.
    timeout: Timeout (in seconds) for the reproduction.

  Returns:
    An engine.ReproduceResult with the worker's command, return code,
    execution time and output.

  Raises:
    testcase_manager.TargetNotFoundError: If the worker reports that the
        fuzz target does not exist.
    grpc.RpcError: For any other RPC failure.
  """
  rebased_testcase_path = file_host.rebase_to_worker_root(testcase_path)
  file_host.copy_file_to_worker(testcase_path, rebased_testcase_path)

  request = untrusted_runner_pb2.EngineReproduceRequest(
      engine=engine_impl.name,
      target_name=target_name,
      testcase_path=rebased_testcase_path,
      arguments=arguments,
      timeout=timeout)

  try:
    response = host.stub().EngineReproduce(request)
  except grpc.RpcError as e:
    # Bug fix: the original called str(e, encoding='utf-8', errors='ignore'),
    # which raises TypeError because `e` is an exception object, not bytes
    # (str() only accepts encoding/errors for buffer arguments). Inspect the
    # exception's repr instead.
    if 'TargetNotFoundError' in repr(e):
      # Resurface the right exception.
      raise testcase_manager.TargetNotFoundError('Failed to find target ' +
                                                 target_name)
    raise

  return engine.ReproduceResult(list(response.command), response.return_code,
                                response.time_executed, response.output)
def _run_libfuzzer_testcase(testcase, testcase_file_path):
  """Run libFuzzer testcase, and return the CrashResult."""
  # Start from a clean slate: kill leftover application instances and wipe
  # the temporary directory before reproducing.
  process_handler.cleanup_stale_processes()
  shell.clear_temp_directory()

  # On a trusted host, the actual run happens on the worker, so mirror the
  # testcase file to the equivalent worker-rooted path first.
  if environment.is_trusted_host():
    from bot.untrusted_runner import file_host
    worker_testcase_path = file_host.rebase_to_worker_root(testcase_file_path)
    file_host.copy_file_to_worker(testcase_file_path, worker_testcase_path)

  timeout = environment.get_value('TEST_TIMEOUT',
                                  process_handler.DEFAULT_TEST_TIMEOUT)
  command = tests.get_command_line_for_application(
      file_to_run=testcase_file_path, needs_http=testcase.http_flag)
  return_code, crash_time, output = process_handler.run_process(
      command, timeout=timeout)
  return CrashResult(return_code, crash_time, output)
def test_copy_file_to_worker_intermediate(self):
  """Tests remote copy_file_to_worker creating intermediate paths."""
  src_path = os.path.join(self.tmp_dir, 'src')
  # Use binary mode, consistent with test_copy_file_to_worker in this class:
  # TEST_FILE_CONTENTS is written there with 'wb', so it is bytes, and text
  # mode here would raise TypeError on write.
  with open(src_path, 'wb') as f:
    f.write(TEST_FILE_CONTENTS)

  # Destination includes directories that do not yet exist; the copy must
  # create them.
  dest_path = os.path.join(self.tmp_dir, 'dir1', 'dir2', 'dst')
  self.assertTrue(file_host.copy_file_to_worker(src_path, dest_path))

  with open(dest_path, 'rb') as f:
    self.assertEqual(f.read(), TEST_FILE_CONTENTS)
def test_copy_file_to_worker(self):
  """Tests remote copy_file_to_worker."""
  source = os.path.join(self.tmp_dir, 'src')
  destination = os.path.join(self.tmp_dir, 'dst')

  # Create the source file with the known contents.
  with open(source, 'wb') as handle:
    handle.write(TEST_FILE_CONTENTS)

  # Copy must report success and the destination must match byte-for-byte.
  self.assertTrue(file_host.copy_file_to_worker(source, destination))
  with open(destination, 'rb') as handle:
    self.assertEqual(handle.read(), TEST_FILE_CONTENTS)
def test_copy_file_to_worker(self):
  """Test file_host.copy_file_to_worker."""
  # Exactly three chunk-sized runs so the transfer splits into three chunks.
  contents = ''.join(
      letter * config.FILE_TRANSFER_CHUNK_SIZE for letter in 'ABC')
  self.fs.create_file('/file', contents=contents)

  def mock_copy_file_to(iterator, metadata):
    """Mock copy file to."""
    received = [chunk.data for chunk in iterator]
    self.assertEqual(3, len(received))
    self.assertEqual([('path-bin', '/file')], metadata)
    self.assertEqual(''.join(received), contents)
    return untrusted_runner_pb2.CopyFileToResponse(result=True)

  self.mock.stub().CopyFileTo.side_effect = mock_copy_file_to
  self.assertTrue(file_host.copy_file_to_worker('/file', '/file'))
def update_data_bundle(fuzzer, data_bundle):
  """Updates a data bundle to the latest version.

  Syncs the bundle from its GCS bucket into the local bundle directory
  (and, on a trusted host with sync_to_worker set, into the worker's
  equivalent directory), guarded by a per-bundle lock.

  Args:
    fuzzer: Fuzzer entity owning the bundle (only .name is read here).
    data_bundle: Data bundle entity (.name, .is_local, .sync_to_worker).

  Returns:
    True on success or when the bundle is already up to date; False on any
    setup, locking or sync failure.
  """
  # This module can't be in the global imports due to appengine issues
  # with multiprocessing and psutil imports.
  from google_cloud_utils import gsutil

  # If we are using a data bundle on NFS, it is expected that our testcases
  # will usually be large enough that we would fill up our tmpfs directory
  # pretty quickly. So, change it to use an on-disk directory.
  if not data_bundle.is_local:
    testcase_disk_directory = environment.get_value('FUZZ_INPUTS_DISK')
    environment.set_value('FUZZ_INPUTS', testcase_disk_directory)

  data_bundle_directory = get_data_bundle_directory(fuzzer.name)
  if not data_bundle_directory:
    logs.log_error('Failed to setup data bundle %s.' % data_bundle.name)
    return False

  if not shell.create_directory(
      data_bundle_directory, create_intermediates=True):
    logs.log_error(
        'Failed to create data bundle %s directory.' % data_bundle.name)
    return False

  # Check if data bundle is up to date. If yes, skip the update.
  if _is_data_bundle_up_to_date(data_bundle, data_bundle_directory):
    logs.log('Data bundle was recently synced, skip.')
    return True

  # Fetch lock for this data bundle.
  if not _fetch_lock_for_data_bundle_update(data_bundle):
    logs.log_error('Failed to lock data bundle %s.' % data_bundle.name)
    return False

  # Re-check if another bot did the sync already. If yes, skip.
  # NOTE: every return path below this point must release the lock.
  if _is_data_bundle_up_to_date(data_bundle, data_bundle_directory):
    logs.log('Another bot finished the sync, skip.')
    _release_lock_for_data_bundle_update(data_bundle)
    return True

  # Captured before the sync so the recorded timestamp is conservative
  # (anything modified during the sync still looks newer than it).
  time_before_sync_start = time.time()

  # No need to sync anything if this is a search index data bundle. In that
  # case, the fuzzer will generate testcases from a gcs bucket periodically.
  if not _is_search_index_data_bundle(data_bundle.name):
    bucket_url = data_handler.get_data_bundle_bucket_url(data_bundle.name)

    if environment.is_trusted_host() and data_bundle.sync_to_worker:
      from bot.untrusted_runner import corpus_manager
      from bot.untrusted_runner import file_host
      # Sync directly into the worker's rebased directory rather than
      # syncing locally and copying over.
      worker_data_bundle_directory = file_host.rebase_to_worker_root(
          data_bundle_directory)

      file_host.create_directory(
          worker_data_bundle_directory, create_intermediates=True)
      result = corpus_manager.RemoteGSUtilRunner().rsync(
          bucket_url, worker_data_bundle_directory, delete=False)
    else:
      result = gsutil.GSUtilRunner().rsync(
          bucket_url, data_bundle_directory, delete=False)

    if result.return_code != 0:
      logs.log_error('Failed to sync data bundle %s: %s.' %
                     (data_bundle.name, result.output))
      _release_lock_for_data_bundle_update(data_bundle)
      return False

  # Update the testcase list file.
  testcase_manager.create_testcase_list_file(data_bundle_directory)

  # Write last synced time in the sync file.
  sync_file_path = _get_data_bundle_sync_file_path(data_bundle_directory)
  utils.write_data_to_file(time_before_sync_start, sync_file_path)
  if environment.is_trusted_host() and data_bundle.sync_to_worker:
    from bot.untrusted_runner import file_host
    # Mirror the sync timestamp to the worker so its up-to-date check agrees.
    worker_sync_file_path = file_host.rebase_to_worker_root(sync_file_path)
    file_host.copy_file_to_worker(sync_file_path, worker_sync_file_path)

  # Release acquired lock.
  _release_lock_for_data_bundle_update(data_bundle)
  return True
def _run_libfuzzer_tool(tool_name,
                        testcase,
                        testcase_file_path,
                        timeout,
                        expected_crash_state,
                        set_dedup_flags=False):
  """Run libFuzzer tool to either minimize or cleanse.

  Args:
    tool_name: Tool to run, interpolated into the --cf-* flags
        (e.g. 'minimize' or 'cleanse').
    testcase: Testcase entity; .http_flag and .security_flag are read, and
        .minimized_keys is written on success.
    testcase_file_path: Path of the testcase to process.
    timeout: Tool timeout in seconds (a 60s buffer is added for the process).
    expected_crash_state: Crash state the result must match to be accepted.
    set_dedup_flags: If True, temporarily enable libFuzzer's own dedup
        comparison via memory tool options.

  Returns:
    A (output_file_path, crash_result) tuple on success, or (None, None) if
    the tool produced no output or the resulting crash does not match the
    expected state/security flag.
  """

  # Save the current memory tool options so _unset_dedup_flags can restore
  # them exactly.
  memory_tool_options_var = environment.get_current_memory_tool_var()
  saved_memory_tool_options = environment.get_value(memory_tool_options_var)

  def _set_dedup_flags():
    """Allow libFuzzer to do its own crash comparison during minimization."""
    memory_tool_options = environment.get_memory_tool_options(
        memory_tool_options_var)

    memory_tool_options['symbolize'] = 1
    memory_tool_options['dedup_token_length'] = 3

    environment.set_memory_tool_options(memory_tool_options_var,
                                        memory_tool_options)

  def _unset_dedup_flags():
    """Reset memory tool options."""
    # This is needed so that when we re-run, we can symbolize ourselves
    # (ignoring inline frames).
    environment.set_value(memory_tool_options_var, saved_memory_tool_options)

  output_file_path = get_temporary_file_name(testcase_file_path)
  rebased_output_file_path = output_file_path

  # On a trusted host the tool runs on the worker: push the testcase over and
  # point the output flag at the worker-rooted path.
  if environment.is_trusted_host():
    from bot.untrusted_runner import file_host
    file_host.copy_file_to_worker(
        testcase_file_path, file_host.rebase_to_worker_root(testcase_file_path))
    rebased_output_file_path = file_host.rebase_to_worker_root(output_file_path)

  arguments = environment.get_value('APP_ARGS', '')
  arguments += (' --cf-{tool_name}-timeout={timeout} '
                '--cf-{tool_name}-to={output_file_path}').format(
                    tool_name=tool_name,
                    output_file_path=rebased_output_file_path,
                    timeout=timeout)
  command = tests.get_command_line_for_application(
      file_to_run=testcase_file_path,
      app_args=arguments,
      needs_http=testcase.http_flag)
  logs.log('Executing command: %s' % command)

  if set_dedup_flags:
    _set_dedup_flags()

  # A small buffer is added to the timeout to allow the current test to
  # finish, and file to be written. Since we should terminate beforehand, a
  # long delay only slows fuzzing in cases where it's necessary.
  _, _, output = process_handler.run_process(command, timeout=timeout + 60)

  # Retrieve the tool's output file from the worker before checking for it.
  if environment.is_trusted_host():
    from bot.untrusted_runner import file_host
    file_host.copy_file_from_worker(rebased_output_file_path, output_file_path)

  if set_dedup_flags:
    _unset_dedup_flags()

  # No output file means the tool did not produce a result.
  if not os.path.exists(output_file_path):
    logs.log_warn('LibFuzzer %s run failed.' % tool_name, output=output)
    return None, None

  # Ensure that the crash parameters match. It's possible that we will
  # minimize/cleanse to an unrelated bug, such as a timeout.
  crash_result = _run_libfuzzer_testcase(testcase, output_file_path)
  state = crash_result.get_symbolized_data()
  security_flag = crash_result.is_security_issue()
  if (security_flag != testcase.security_flag or
      state.crash_state != expected_crash_state):
    logs.log_warn('Ignoring unrelated crash.\n'
                  'State: %s (expected %s)\n'
                  'Security: %s (expected %s)\n'
                  'Output: %s\n' %
                  (state.crash_state, expected_crash_state, security_flag,
                   testcase.security_flag, state.crash_stacktrace))
    return None, None

  # Persist the processed testcase and record its blob key on the entity.
  with open(output_file_path, 'rb') as file_handle:
    minimized_keys = blobs.write_blob(file_handle)

  testcase.minimized_keys = minimized_keys
  testcase.put()

  return output_file_path, crash_result