def test_setup_testcase(self):
  """Test setup_testcase copies the fuzzed testcase into FUZZ_INPUTS and
  mirrors the directory on the untrusted worker."""
  self._setup_env(job_type='job')
  fuzz_inputs = os.environ['FUZZ_INPUTS']

  testcase = data_types.Testcase()
  testcase.job_type = 'job'
  testcase.absolute_path = os.path.join(fuzz_inputs, 'testcase.ext')

  with tempfile.NamedTemporaryFile() as f:
    # NamedTemporaryFile defaults to binary mode ('w+b'); the payload must
    # be bytes — writing a str here raises TypeError on Python 3.
    f.write(b'contents')
    f.seek(0)
    testcase.fuzzed_keys = blobs.write_blob(f)

  testcase.put()

  file_list, input_directory, testcase_file_path = (
      setup.setup_testcase(testcase))
  self.assertItemsEqual(file_list, [
      testcase.absolute_path,
  ])
  self.assertEqual(input_directory, fuzz_inputs)
  self.assertEqual(testcase_file_path, testcase.absolute_path)

  # The worker-side copy of FUZZ_INPUTS must match the trusted host's copy.
  worker_fuzz_inputs = file_host.rebase_to_worker_root(fuzz_inputs)
  self.assert_dirs_equal(fuzz_inputs, worker_fuzz_inputs)
def test_minimize(self):
  """Test minimize (OSS-Fuzz mode): minimized testcase is stored and the
  unneeded sanitizer option is stripped from the recorded environment."""
  helpers.patch(self, ["base.utils.is_oss_fuzz"])
  self.mock.is_oss_fuzz.return_value = True

  testcase_file_path = os.path.join(self.temp_dir, "testcase")
  with open(testcase_file_path, "wb") as f:
    # File is opened in binary mode, so the payload must be bytes;
    # writing the str "EEE" raises TypeError on Python 3.
    f.write(b"EEE")

  with open(testcase_file_path) as f:
    fuzzed_keys = blobs.write_blob(f)

  testcase_path = os.path.join(self.temp_dir, "testcase")

  testcase = data_types.Testcase(
      crash_type="Null-dereference WRITE",
      crash_address="",
      crash_state="Foo\n",
      crash_stacktrace="",
      crash_revision=1337,
      fuzzed_keys=fuzzed_keys,
      fuzzer_name="libFuzzer",
      overridden_fuzzer_name="libFuzzer_test_fuzzer",
      job_type="libfuzzer_asan_job",
      original_absolute_path=testcase_path,
      absolute_path=testcase_path,
      minimized_arguments="%TESTCASE% test_fuzzer",
  )
  testcase.put()

  data_types.FuzzTarget(engine="libFuzzer", binary="test_fuzzer").put()

  fuzzers_init.run()

  self._setup_env(job_type="libfuzzer_asan_job")
  environment.set_value("APP_ARGS", testcase.minimized_arguments)
  environment.set_value("LIBFUZZER_MINIMIZATION_ROUNDS", 3)
  environment.set_value("UBSAN_OPTIONS",
                        "unneeded_option=1:silence_unsigned_overflow=1")
  minimize_task.execute_task(testcase.key.id(), "libfuzzer_asan_job")

  testcase = data_handler.get_testcase_by_id(testcase.key.id())
  self.assertNotEqual("", testcase.minimized_keys)
  self.assertNotEqual("NA", testcase.minimized_keys)
  self.assertNotEqual(testcase.fuzzed_keys, testcase.minimized_keys)
  # "unneeded_option" must be dropped; only the relevant UBSan flag remains.
  self.assertEqual(
      {
          "ASAN_OPTIONS": {},
          "UBSAN_OPTIONS": {
              "silence_unsigned_overflow": 1
          }
      },
      testcase.get_metadata("env"),
  )

  blobs.read_blob_to_disk(testcase.minimized_keys, testcase_path)

  with open(testcase_path) as f:
    self.assertEqual(1, len(f.read()))
def get_upload(self):
  """Return blob info for the file uploaded with this request.

  Raises:
    helpers.EarlyExitException: if no file upload was found (HTTP 400).
  """
  upload = self.request.POST.get('file')
  if not isinstance(upload, cgi.FieldStorage):
    raise helpers.EarlyExitException('File upload not found.', 400)

  contents = upload.file.read()
  blob_key = blobs.write_blob(NamedBytesIO(upload.filename, contents))
  return blobs.get_blob_info(blob_key)
def get_upload(self):
  """Return blob info for the file uploaded with this request.

  Raises:
    helpers.EarlyExitException: if no file upload was found (HTTP 400).
  """
  upload = request.files.get('file')
  if not upload:
    raise helpers.EarlyExitException('File upload not found.', 400)

  contents = upload.stream.read()
  blob_key = blobs.write_blob(NamedBytesIO(upload.filename, contents))
  return blobs.get_blob_info(blob_key)
def test_minimize(self):
  """Test minimize in OSS-Fuzz mode: a minimized testcase is written to
  blobstore and the recorded env keeps only the needed sanitizer option."""
  helpers.patch(self, ['base.utils.is_oss_fuzz'])
  self.mock.is_oss_fuzz.return_value = True

  testcase_file_path = os.path.join(self.temp_dir, 'testcase')
  with open(testcase_file_path, 'wb') as handle:
    handle.write(b'EEE')

  with open(testcase_file_path) as handle:
    fuzzed_keys = blobs.write_blob(handle)

  testcase_path = os.path.join(self.temp_dir, 'testcase')

  testcase = data_types.Testcase(
      crash_type='Null-dereference WRITE',
      crash_address='',
      crash_state='Foo\n',
      crash_stacktrace='',
      crash_revision=1337,
      fuzzed_keys=fuzzed_keys,
      fuzzer_name='libFuzzer',
      overridden_fuzzer_name='libFuzzer_test_fuzzer',
      job_type='libfuzzer_asan_job',
      original_absolute_path=testcase_path,
      absolute_path=testcase_path,
      minimized_arguments='%TESTCASE% test_fuzzer')
  testcase.put()

  data_types.FuzzTarget(engine='libFuzzer', binary='test_fuzzer').put()
  fuzzers_init.run()

  self._setup_env(job_type='libfuzzer_asan_job')
  environment.set_value('APP_ARGS', testcase.minimized_arguments)
  environment.set_value('LIBFUZZER_MINIMIZATION_ROUNDS', 3)
  environment.set_value('UBSAN_OPTIONS',
                        'unneeded_option=1:silence_unsigned_overflow=1')
  minimize_task.execute_task(testcase.key.id(), 'libfuzzer_asan_job')

  testcase = data_handler.get_testcase_by_id(testcase.key.id())
  self.assertNotEqual('', testcase.minimized_keys)
  self.assertNotEqual('NA', testcase.minimized_keys)
  self.assertNotEqual(testcase.fuzzed_keys, testcase.minimized_keys)

  # The unneeded UBSan option should have been stripped from the env.
  expected_env = {
      'ASAN_OPTIONS': {},
      'UBSAN_OPTIONS': {
          'silence_unsigned_overflow': 1
      }
  }
  self.assertEqual(expected_env, testcase.get_metadata('env'))

  blobs.read_blob_to_disk(testcase.minimized_keys, testcase_path)

  with open(testcase_path) as handle:
    self.assertEqual(1, len(handle.read()))
def test_write_blob_file(self):
  """Test write_blob with a filename: the new key is returned and the file
  is copied to the blobs bucket with its basename as metadata."""
  self.mock.get.return_value = None

  key = blobs.write_blob('/file')
  self.assertEqual('new-key', key)

  expected_call = mock.call(
      '/file', '/blobs-bucket/new-key', metadata={'filename': 'file'})
  self.mock.copy_file_to.assert_has_calls([expected_call])
def test_write_blob_handle(self):
  """Test write_blob with a file handle: the handle itself is passed to the
  copy and its .name is recorded as the filename metadata."""
  self.mock.get.return_value = None

  handle = mock.Mock()
  handle.name = 'filename'

  key = blobs.write_blob(handle)
  self.assertEqual('new-key', key)

  expected_call = mock.call(
      handle, '/blobs-bucket/new-key', metadata={'filename': 'filename'})
  self.mock.copy_file_to.assert_has_calls([expected_call])
def store_minidump(self):
  """Store the crash minidump in appengine and return key.

  Returns:
    The blobstore key for the stored minidump, or '' if there is no
    minidump or the upload failed (best-effort; never raises).
  """
  if not self.minidump_info.path:
    return ''

  minidump_key = ''
  logs.log('Storing minidump (%s) in blobstore.' % self.minidump_info.path)

  try:
    with open(self.minidump_info.path, 'rb') as file_handle:
      minidump_key = blobs.write_blob(file_handle)
  except Exception:
    # Was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt. Upload remains best-effort: log and fall through.
    logs.log_error('Failed to store minidump.')

  if minidump_key:
    # Record the key alongside the existing path.
    self.minidump_info = FileMetadataInfo(
        path=self.minidump_info.path, key=minidump_key)

  return minidump_key
def test_minimize(self):
  """Test minimize: a minimized testcase is stored in blobstore and
  reduced to a single byte."""
  testcase_file_path = os.path.join(self.temp_dir, 'testcase')
  with open(testcase_file_path, 'wb') as f:
    # File is opened in binary mode, so the payload must be bytes;
    # writing the str 'EEE' raises TypeError on Python 3.
    f.write(b'EEE')

  with open(testcase_file_path) as f:
    fuzzed_keys = blobs.write_blob(f)

  testcase_path = os.path.join(self.temp_dir, 'testcase')

  testcase = data_types.Testcase(
      crash_type='Null-dereference WRITE',
      crash_address='',
      crash_state='Foo\n',
      crash_stacktrace='',
      crash_revision=1337,
      fuzzed_keys=fuzzed_keys,
      fuzzer_name='libFuzzer',
      overridden_fuzzer_name='libFuzzer_test_fuzzer',
      job_type='libfuzzer_asan_job',
      original_absolute_path=testcase_path,
      absolute_path=testcase_path,
      minimized_arguments='%TESTCASE% test_fuzzer')
  testcase.put()

  self._setup_env(job_type='libfuzzer_asan_job')
  environment.set_value('APP_ARGS', testcase.minimized_arguments)
  environment.set_value('LIBFUZZER_MINIMIZATION_ROUNDS', 3)
  minimize_task.execute_task(testcase.key.id(), 'libfuzzer_asan_job')

  testcase = data_handler.get_testcase_by_id(testcase.key.id())
  self.assertNotEqual('', testcase.minimized_keys)
  self.assertNotEqual('NA', testcase.minimized_keys)
  self.assertNotEqual(testcase.fuzzed_keys, testcase.minimized_keys)
  self.assertEqual({'ASAN_OPTIONS': {}}, testcase.get_metadata('env'))

  blobs.read_blob_to_disk(testcase.minimized_keys, testcase_path)

  with open(testcase_path) as f:
    self.assertEqual(1, len(f.read()))
def filter_stacktrace(stacktrace):
  """Filters stacktrace and returns content appropriate for storage as an
  appengine entity.

  Short traces are returned inline (as unicode); oversized ones are stored
  in blobstore and a prefixed key reference is returned instead.
  """
  unicode_stacktrace = utils.decode_to_unicode(stacktrace)
  if len(unicode_stacktrace) <= data_types.STACKTRACE_LENGTH_LIMIT:
    return unicode_stacktrace

  # Too large for the entity: stage the full trace in a temp file and
  # upload it to blobstore.
  tmp_stacktrace_file = os.path.join(
      environment.get_value('BOT_TMPDIR'), 'stacktrace.tmp')

  try:
    with open(tmp_stacktrace_file, 'w') as handle:
      handle.write(stacktrace)
    with open(tmp_stacktrace_file, 'r') as handle:
      key = blobs.write_blob(handle)
  except Exception:
    # Fall back to storing a truncated tail of the trace inline.
    logs.log_error('Unable to write crash stacktrace to temporary file.')
    shell.remove_file(tmp_stacktrace_file)
    return unicode_stacktrace[-data_types.STACKTRACE_LENGTH_LIMIT:]

  shell.remove_file(tmp_stacktrace_file)
  return '%s%s' % (data_types.BLOBSTORE_STACK_PREFIX, key)
def archive_testcase_and_dependencies_in_gcs(resource_list, testcase_path):
  """Archive testcase and its dependencies, and store in blobstore.

  Args:
    resource_list: List of resource paths the testcase depends on (may be
        None/empty); extended here with dependencies derived from the path.
    testcase_path: Path to the testcase file on disk.

  Returns:
    A (fuzzed_key, archived, absolute_filename, zip_filename) tuple, or
    (None, None, None, None) on any failure.
  """
  if not os.path.exists(testcase_path):
    logs.log_error('Unable to find testcase %s.' % testcase_path)
    return None, None, None, None

  absolute_filename = testcase_path
  archived = False
  zip_filename = None
  zip_path = None

  if not resource_list:
    resource_list = []

  # Add resource dependencies based on testcase path. These include
  # stuff like extensions directory, dependency files, etc.
  resource_list.extend(
      testcase_manager.get_resource_dependencies(testcase_path))

  # Filter out duplicates, directories, and files that do not exist.
  resource_list = utils.filter_file_list(resource_list)

  logs.log('Testcase and related files :\n%s' % str(resource_list))

  if len(resource_list) <= 1:
    # If this does not have any resources, just save the testcase.
    # TODO(flowerhack): Update this when we teach CF how to download testcases.
    try:
      file_handle = open(testcase_path, 'rb')
    except IOError:
      logs.log_error('Unable to open testcase %s.' % testcase_path)
      return None, None, None, None
  else:
    # If there are resources, create an archive.

    # Find the common root directory for all of the resources.
    # Assumption: resource_list[0] is the testcase path.
    base_directory_list = resource_list[0].split(os.path.sep)
    for list_index in range(1, len(resource_list)):
      current_directory_list = resource_list[list_index].split(os.path.sep)
      length = min(len(base_directory_list), len(current_directory_list))
      # Truncate the shared prefix at the first mismatching component.
      for directory_index in range(length):
        if (current_directory_list[directory_index] !=
            base_directory_list[directory_index]):
          base_directory_list = base_directory_list[0:directory_index]
          break

    base_directory = os.path.sep.join(base_directory_list)
    logs.log('Subresource common base directory: %s' % base_directory)
    if base_directory:
      # Common parent directory, archive sub-paths only.
      base_len = len(base_directory) + len(os.path.sep)
    else:
      # No common parent directory, archive all paths as it-is.
      base_len = 0

    # Prepare the filename for the archive.
    zip_filename, _ = os.path.splitext(os.path.basename(testcase_path))
    zip_filename += _TESTCASE_ARCHIVE_EXTENSION

    # Create the archive.
    zip_path = os.path.join(environment.get_value('INPUT_DIR'), zip_filename)
    zip_file = zipfile.ZipFile(zip_path, 'w')
    for file_name in resource_list:
      if os.path.exists(file_name):
        # Store each entry relative to the common base directory.
        relative_filename = file_name[base_len:]
        zip_file.write(file_name, relative_filename, zipfile.ZIP_DEFLATED)
    zip_file.close()

    try:
      file_handle = open(zip_path, 'rb')
    except IOError:
      logs.log_error('Unable to open testcase archive %s.' % zip_path)
      return None, None, None, None

    archived = True
    absolute_filename = testcase_path[base_len:]

  fuzzed_key = blobs.write_blob(file_handle)
  file_handle.close()

  # Don't need the archive after writing testcase to blobstore.
  if zip_path:
    shell.remove_file(zip_path)

  return fuzzed_key, archived, absolute_filename, zip_filename
def _process_corpus_crashes(context, result):
  """Process crashes found in the corpus.

  For each new (not already filed) crash, uploads the crashing unit to
  blobstore, stores a Testcase entity, records metadata, and schedules
  follow-up tasks (starting with minimization).

  Args:
    context: Pruning context providing the fuzz target and bad-units path.
    result: Corpus pruning result holding revision and the list of crashes.

  Raises:
    CorpusPruningException: if a worker-provided unit path escapes
        |context.bad_units_path|.
  """
  # Default Testcase entity values.
  crash_revision = result.revision
  job_type = environment.get_value("JOB_NAME")
  minimized_arguments = "%TESTCASE% " + context.fuzz_target.binary
  project_name = data_handler.get_project_name(job_type)

  comment = "Fuzzer %s generated corpus testcase crashed (r%s)" % (
      context.fuzz_target.project_qualified_name(),
      crash_revision,
  )

  # Generate crash reports.
  for crash in result.crashes:
    # Skip crashes that already have a testcase filed.
    existing_testcase = data_handler.find_testcase(project_name,
                                                   crash.crash_type,
                                                   crash.crash_state,
                                                   crash.security_flag)
    if existing_testcase:
      continue

    # Upload/store testcase.
    if environment.is_trusted_host():
      from bot.untrusted_runner import file_host
      unit_path = os.path.join(context.bad_units_path,
                               os.path.basename(crash.unit_path))
      # Prevent the worker from escaping out of |context.bad_units_path|.
      if not file_host.is_directory_parent(unit_path, context.bad_units_path):
        raise CorpusPruningException("Invalid units path from worker.")

      file_host.copy_file_from_worker(crash.unit_path, unit_path)
    else:
      unit_path = crash.unit_path

    with open(unit_path, "rb") as f:
      key = blobs.write_blob(f)

    # Set the absolute_path property of the Testcase to a file in FUZZ_INPUTS
    # instead of the local quarantine directory.
    absolute_testcase_path = os.path.join(
        environment.get_value("FUZZ_INPUTS"), "testcase")

    testcase_id = data_handler.store_testcase(
        crash=crash,
        fuzzed_keys=key,
        minimized_keys="",
        regression="",
        fixed="",
        one_time_crasher_flag=False,
        crash_revision=crash_revision,
        comment=comment,
        absolute_path=absolute_testcase_path,
        fuzzer_name=context.fuzz_target.engine,
        fully_qualified_fuzzer_name=context.fuzz_target.
        fully_qualified_name(),
        job_type=job_type,
        archived=False,
        archive_filename="",
        binary_flag=True,
        http_flag=False,
        gestures=None,
        redzone=DEFAULT_REDZONE,
        disable_ubsan=False,
        minidump_keys=None,
        window_argument=None,
        timeout_multiplier=1.0,
        minimized_arguments=minimized_arguments,
    )

    # Set fuzzer_binary_name in testcase metadata.
    testcase = data_handler.get_testcase_by_id(testcase_id)
    testcase.set_metadata("fuzzer_binary_name", result.fuzzer_binary_name)

    issue_metadata = engine_common.get_all_issue_metadata_for_testcase(
        testcase)
    if issue_metadata:
      # Batch the metadata writes; a single put() persists them all.
      for key, value in issue_metadata.items():
        testcase.set_metadata(key, value, update_testcase=False)

      testcase.put()

    # Create additional tasks for testcase (starting with minimization).
    # NOTE(review): re-fetched by id here, presumably to pick up any
    # datastore-side changes before task creation — confirm intent.
    testcase = data_handler.get_testcase_by_id(testcase_id)
    task_creation.create_tasks(testcase)
def execute_task(metadata_id, job_type):
  """Unpack a bundled testcase archive and create analyze jobs for each item.

  Args:
    metadata_id: Id of the data_types.BundledArchiveMetadata entity.
    job_type: Job type name, forwarded to retry tasks.
  """
  metadata = ndb.Key(data_types.BundledArchiveMetadata, int(metadata_id)).get()
  if not metadata:
    logs.log_error('Invalid bundle metadata id %s.' % metadata_id)
    return

  bot_name = environment.get_value('BOT_NAME')
  upload_metadata = data_types.TestcaseUploadMetadata.query(
      data_types.TestcaseUploadMetadata.blobstore_key ==
      metadata.blobstore_key).get()
  if not upload_metadata:
    logs.log_error('Invalid upload metadata key %s.' % metadata.blobstore_key)
    return

  # Update the upload metadata with this bot name.
  upload_metadata.bot_name = bot_name
  upload_metadata.put()

  # We can't use FUZZ_INPUTS directory since it is constrained
  # by tmpfs limits.
  testcases_directory = environment.get_value('FUZZ_INPUTS_DISK')

  # Retrieve multi-testcase archive.
  archive_path = os.path.join(testcases_directory, metadata.archive_filename)
  if not blobs.read_blob_to_disk(metadata.blobstore_key, archive_path):
    logs.log_error('Could not retrieve archive for bundle %d.' % metadata_id)
    tasks.add_task('unpack', metadata_id, job_type)
    return

  try:
    archive.unpack(archive_path, testcases_directory)
  except Exception:
    # Was a bare `except:`. Keep the retry-on-failure behavior, but do not
    # swallow SystemExit/KeyboardInterrupt.
    logs.log_error('Could not unpack archive for bundle %d.' % metadata_id)
    tasks.add_task('unpack', metadata_id, job_type)
    return

  archive_state = data_types.ArchiveStatus.NONE
  bundled = True

  file_list = archive.get_file_list(archive_path)

  for file_path in file_list:
    absolute_file_path = os.path.join(testcases_directory, file_path)
    filename = os.path.basename(absolute_file_path)

    # Only files are actual testcases. Skip directories.
    if not os.path.isfile(absolute_file_path):
      continue

    try:
      # Context manager guarantees the handle is closed even if the
      # blobstore write fails.
      with open(absolute_file_path, 'rb') as file_handle:
        blob_key = blobs.write_blob(file_handle)
    except Exception:
      blob_key = None

    if not blob_key:
      logs.log_error(
          'Could not write testcase %s to blobstore.' % absolute_file_path)
      continue

    data_handler.create_user_uploaded_testcase(
        blob_key, metadata.blobstore_key, archive_state,
        metadata.archive_filename, filename, metadata.timeout,
        metadata.job_type, metadata.job_queue, metadata.http_flag,
        metadata.gestures, metadata.additional_arguments,
        metadata.bug_information, metadata.crash_revision,
        metadata.uploader_email, metadata.platform_id,
        metadata.app_launch_command, metadata.fuzzer_name,
        metadata.overridden_fuzzer_name, metadata.fuzzer_binary_name, bundled,
        upload_metadata.retries, upload_metadata.bug_summary_update_flag)

  # The upload metadata for the archive is not needed anymore since we created
  # one for each testcase.
  upload_metadata.key.delete()

  shell.clear_testcase_directories()
def store_minimized_testcase(testcase, base_directory, file_list,
                             file_to_run_data, file_to_run):
  """Store all files that make up this testcase.

  Writes the main file data, optionally re-archives surviving files into a
  zip, uploads the result to blobstore, and updates the testcase entity's
  minimized_keys / archive_state / absolute_path.

  Args:
    testcase: The Testcase entity being minimized.
    base_directory: Directory the testcase's relative file paths are under.
    file_list: Relative paths of all files belonging to the testcase.
    file_to_run_data: Contents for the main file to run.
    file_to_run: Path of the main file to run.
  """
  # Write the main file data.
  utils.write_data_to_file(file_to_run_data, file_to_run)

  # Prepare the file.
  zip_path = None
  if testcase.archive_state:
    if len(file_list) > 1:
      # Multi-file testcase: rebuild a minimized archive.
      testcase.archive_state |= data_types.ArchiveStatus.MINIMIZED
      zip_path = os.path.join(
          environment.get_value('INPUT_DIR'), '%d.zip' % testcase.key.id())
      zip_file = zipfile.ZipFile(zip_path, 'w')
      count = 0
      filtered_file_list = []
      for file_name in file_list:
        absolute_filename = os.path.join(base_directory, file_name)
        is_file = os.path.isfile(absolute_filename)
        # Skip empty files not referenced by the main file's data; they were
        # minimized away.
        if file_to_run_data and is_file and os.path.getsize(
            absolute_filename) == 0 and (
                os.path.basename(absolute_filename) not in file_to_run_data):
          continue
        if not os.path.exists(absolute_filename):
          continue
        zip_file.write(absolute_filename, file_name, zipfile.ZIP_DEFLATED)
        if is_file:
          count += 1
          filtered_file_list.append(absolute_filename)

      zip_file.close()
      try:
        if count > 1:
          # More than one surviving file: upload the archive itself.
          file_handle = open(zip_path, 'rb')
        else:
          if not filtered_file_list:
            # We minimized everything. The only thing needed to reproduce is the
            # interaction gesture.
            # NOTE(review): this opens the file write-only and closes it,
            # leaving a closed 'wb' handle that is later passed to
            # blobs.write_blob below — looks suspicious; confirm this path
            # behaves as intended.
            file_path = file_list[0]
            file_handle = open(file_path, 'wb')
            file_handle.close()
          else:
            # Exactly one surviving file: store it directly, un-archived.
            file_path = filtered_file_list[0]
            file_handle = open(file_path, 'rb')

          testcase.absolute_path = os.path.join(base_directory,
                                                os.path.basename(file_path))
          testcase.archive_state &= ~data_types.ArchiveStatus.MINIMIZED
      except IOError:
        testcase.put()  # Preserve what we can.
        logs.log_error('Unable to open archive for blobstore write.')
        return
    else:
      # Archived testcase but only one file listed: store it directly.
      absolute_filename = os.path.join(base_directory, file_list[0])
      file_handle = open(absolute_filename, 'rb')
      testcase.archive_state &= ~data_types.ArchiveStatus.MINIMIZED
  else:
    # Non-archived testcase: store the single file as-is.
    file_handle = open(file_list[0], 'rb')
    testcase.archive_state &= ~data_types.ArchiveStatus.MINIMIZED

  # Store the testcase.
  minimized_keys = blobs.write_blob(file_handle)
  file_handle.close()

  testcase.minimized_keys = minimized_keys
  testcase.put()

  if zip_path:
    shell.remove_file(zip_path)
def _run_libfuzzer_tool(tool_name,
                        testcase,
                        testcase_file_path,
                        timeout,
                        expected_crash_state,
                        set_dedup_flags=False):
  """Run libFuzzer tool to either minimize or cleanse.

  Args:
    tool_name: Tool to run ('minimize' or 'cleanse'), interpolated into the
        --cf-<tool>-* flags.
    testcase: The Testcase entity being processed.
    testcase_file_path: Path to the reproducing testcase file.
    timeout: Tool timeout in seconds (process gets a 60s grace buffer).
    expected_crash_state: Crash state the output must still reproduce.
    set_dedup_flags: If True, let libFuzzer do its own crash deduplication
        during the run by tweaking memory tool options.

  Returns:
    (output_file_path, crash_result) on success; (None, None) if the tool
    produced no output or the resulting crash does not match expectations.
  """

  memory_tool_options_var = environment.get_current_memory_tool_var()
  saved_memory_tool_options = environment.get_value(memory_tool_options_var)

  def _set_dedup_flags():
    """Allow libFuzzer to do its own crash comparison during minimization."""
    memory_tool_options = environment.get_memory_tool_options(
        memory_tool_options_var)

    memory_tool_options['symbolize'] = 1
    memory_tool_options['dedup_token_length'] = 3

    environment.set_memory_tool_options(memory_tool_options_var,
                                        memory_tool_options)

  def _unset_dedup_flags():
    """Reset memory tool options."""
    # This is needed so that when we re-run, we can symbolize ourselves
    # (ignoring inline frames).
    environment.set_value(memory_tool_options_var, saved_memory_tool_options)

  output_file_path = get_temporary_file_name(testcase_file_path)
  rebased_output_file_path = output_file_path

  if environment.is_trusted_host():
    from bot.untrusted_runner import file_host
    # The tool runs on the untrusted worker; stage input there and point the
    # output flag at the worker-side path.
    file_host.copy_file_to_worker(
        testcase_file_path, file_host.rebase_to_worker_root(testcase_file_path))
    rebased_output_file_path = file_host.rebase_to_worker_root(output_file_path)

  arguments = environment.get_value('APP_ARGS', '')
  arguments += (' --cf-{tool_name}-timeout={timeout} '
                '--cf-{tool_name}-to={output_file_path}').format(
                    tool_name=tool_name,
                    output_file_path=rebased_output_file_path,
                    timeout=timeout)
  command = tests.get_command_line_for_application(
      file_to_run=testcase_file_path,
      app_args=arguments,
      needs_http=testcase.http_flag)
  logs.log('Executing command: %s' % command)

  if set_dedup_flags:
    _set_dedup_flags()

  # A small buffer is added to the timeout to allow the current test to
  # finish, and file to be written. Since we should terminate beforehand, a
  # long delay only slows fuzzing in cases where it's necessary.
  _, _, output = process_handler.run_process(command, timeout=timeout + 60)

  if environment.is_trusted_host():
    from bot.untrusted_runner import file_host
    # Pull the tool's output back from the worker.
    file_host.copy_file_from_worker(rebased_output_file_path, output_file_path)

  if set_dedup_flags:
    _unset_dedup_flags()

  if not os.path.exists(output_file_path):
    logs.log_warn('LibFuzzer %s run failed.' % tool_name, output=output)
    return None, None

  # Ensure that the crash parameters match. It's possible that we will
  # minimize/cleanse to an unrelated bug, such as a timeout.
  crash_result = _run_libfuzzer_testcase(testcase, output_file_path)
  state = crash_result.get_symbolized_data()
  security_flag = crash_result.is_security_issue()
  if (security_flag != testcase.security_flag or
      state.crash_state != expected_crash_state):
    logs.log_warn('Ignoring unrelated crash.\n'
                  'State: %s (expected %s)\n'
                  'Security: %s (expected %s)\n'
                  'Output: %s\n' %
                  (state.crash_state, expected_crash_state, security_flag,
                   testcase.security_flag, state.crash_stacktrace))
    return None, None

  with open(output_file_path, 'rb') as file_handle:
    minimized_keys = blobs.write_blob(file_handle)

  testcase.minimized_keys = minimized_keys
  testcase.put()

  return output_file_path, crash_result