def test_rsync_error_below_threshold_with_not_found_errors(self):
  """Test rsync returning errors (below threshold, but with not found errors
  and overall error count more than threshold)."""
  output = ''.join([
      'blah\n',
      'NotFoundException: 404 gs://bucket/file001 does not exist.\n' * 190,
      'CommandException: 200 files/objects could not be copied/removed.\n',
  ])
  # pylint: disable=protected-access
  self.mock._count_corpus_files.return_value = 10

  def fake_gsutil_result(seconds, timed_out):
    # Failing gsutil run with the canned error output above.
    return new_process.ProcessResult(
        command=['/fake'],
        return_code=1,
        output=output,
        time_executed=seconds,
        timed_out=timed_out)

  # Not-found errors are excused, so a normal (non-timed-out) run succeeds.
  self.mock.run_gsutil.return_value = fake_gsutil_result(10.0, False)
  corpus = corpus_manager.GcsCorpus('bucket')
  self.assertTrue(corpus.rsync_to_disk('/dir', timeout=60))

  # A timed-out run must still be reported as a failure.
  self.mock.run_gsutil.return_value = fake_gsutil_result(30.0, True)
  self.assertFalse(corpus.rsync_to_disk('/dir', timeout=60))
def test_rsync_error_below_threshold(self):
  """Test rsync returning errors (but they're below threshold)."""
  output = ('blah\n'
            'blah\n'
            'CommandException: 10 files/objects could not be copied/removed.\n')
  # pylint: disable=protected-access
  self.mock._count_corpus_files.return_value = 10

  def fake_gsutil_result(seconds, timed_out):
    # Failing gsutil run with a small (below-threshold) error count.
    return new_process.ProcessResult(
        command=['/fake'],
        return_code=1,
        output=output,
        time_executed=seconds,
        timed_out=timed_out)

  # Below the error threshold, so a normal run is treated as success.
  self.mock.run_gsutil.return_value = fake_gsutil_result(10.0, False)
  corpus = corpus_manager.GcsCorpus('bucket')
  self.assertTrue(corpus.rsync_to_disk('/dir', timeout=60))

  # A timed-out run still fails, regardless of the error count.
  self.mock.run_gsutil.return_value = fake_gsutil_result(30.0, True)
  self.assertFalse(corpus.rsync_to_disk('/dir', timeout=60))
def test_rsync_from_disk(self):
  """Test rsync_from_disk."""
  rsync_args = ['-q', 'rsync', '-r', '-d', '/dir', 'gs://bucket/']

  # Single CPU: gsutil gets an explicit parallel thread count override.
  self.mock.cpu_count.return_value = 1
  corpus = corpus_manager.GcsCorpus('bucket')
  self.assertTrue(corpus.rsync_from_disk('/dir'))
  self.assertEqual(
      self.mock.Popen.call_args[0][0],
      ['/gsutil_path/gsutil', '-m', '-o',
       'GSUtil:parallel_thread_count=16'] + rsync_args)

  # Multiple CPUs: no thread count override is added.
  self.mock.cpu_count.return_value = 2
  corpus = corpus_manager.GcsCorpus('bucket')
  self.assertTrue(corpus.rsync_from_disk('/dir'))
  self.assertEqual(self.mock.Popen.call_args[0][0],
                   ['/gsutil_path/gsutil', '-m'] + rsync_args)
def test_upload_files(self):
  """Test upload_files."""
  mock_popen = self.mock.Popen.return_value
  cp_args = ['cp', '-I', 'gs://bucket/']

  # Single CPU: gsutil gets an explicit parallel thread count override,
  # and the file list is fed to the process over stdin.
  self.mock.cpu_count.return_value = 1
  corpus = corpus_manager.GcsCorpus('bucket')
  self.assertTrue(corpus.upload_files(['/dir/a', '/dir/b']))
  self.assertEqual(
      self.mock.Popen.call_args[0][0],
      ['/gsutil_path/gsutil', '-m', '-o',
       'GSUtil:parallel_thread_count=16'] + cp_args)
  mock_popen.communicate.assert_called_with('/dir/a\n/dir/b')

  # Multiple CPUs: no thread count override is added.
  self.mock.cpu_count.return_value = 2
  corpus = corpus_manager.GcsCorpus('bucket')
  self.assertTrue(corpus.upload_files(['/dir/a', '/dir/b']))
  self.assertEqual(self.mock.Popen.call_args[0][0],
                   ['/gsutil_path/gsutil', '-m'] + cp_args)
def __init__(self,
             fuzz_target,
             cross_pollinate_fuzzers,
             cross_pollination_method=Pollination.RANDOM,
             tag=None):
  """Set up the pruning context for a fuzz target.

  Creates the temporary corpus directories and the GCS corpus objects
  (main, quarantine, shared) used during corpus pruning.

  Args:
    fuzz_target: The fuzz target being pruned; its engine and
      project-qualified name drive directory and corpus naming.
    cross_pollinate_fuzzers: Fuzzers whose corpora are cross-pollinated in.
    cross_pollination_method: Strategy used for cross pollination
      (defaults to Pollination.RANDOM).
    tag: Optional tag used by tag-based cross pollination.

  Raises:
    CorpusPruningException: If the target's engine cannot be resolved.
  """
  self.fuzz_target = fuzz_target
  self.cross_pollinate_fuzzers = cross_pollinate_fuzzers
  self.cross_pollination_method = cross_pollination_method
  self.tag = tag
  self.merge_tmp_dir = None
  self.engine = engine.get(self.fuzz_target.engine)
  if not self.engine:
    # Bug fix: the message previously interpolated the `engine` module
    # object instead of the engine name that failed to resolve.
    raise CorpusPruningException(
        'Engine {} not found'.format(self.fuzz_target.engine))

  self._created_directories = []

  # Set up temporary directories where corpora will be synced to.
  # Initial synced corpus.
  self.initial_corpus_path = self._create_temp_corpus_directory(
      '%s_initial_corpus' % self.fuzz_target.project_qualified_name())
  # Minimized corpus.
  self.minimized_corpus_path = self._create_temp_corpus_directory(
      '%s_minimized_corpus' % self.fuzz_target.project_qualified_name())
  # Synced quarantine corpus.
  self.quarantine_corpus_path = self._create_temp_corpus_directory(
      '%s_quarantine' % self.fuzz_target.project_qualified_name())
  # Synced shared corpus.
  self.shared_corpus_path = self._create_temp_corpus_directory(
      '%s_shared' % self.fuzz_target.project_qualified_name())
  # Bad units.
  self.bad_units_path = self._create_temp_corpus_directory(
      '%s_bad_units' % self.fuzz_target.project_qualified_name())
  self.merge_tmp_dir = self._create_temp_corpus_directory('merge_workdir')

  # Main corpus includes regressions; quarantine corpus holds bad units.
  self.corpus = corpus_manager.FuzzTargetCorpus(
      self.fuzz_target.engine,
      self.fuzz_target.project_qualified_name(),
      include_regressions=True)
  self.quarantine_corpus = corpus_manager.FuzzTargetCorpus(
      self.fuzz_target.engine,
      self.fuzz_target.project_qualified_name(),
      quarantine=True)

  shared_corpus_bucket = environment.get_value('SHARED_CORPUS_BUCKET')
  self.shared_corpus = corpus_manager.GcsCorpus(shared_corpus_bucket)
def test_rsync_error_above_threshold(self):
  """Test rsync returning errors (above threshold)."""
  output = ('blah\n'
            'blah\n'
            'CommandException: 11 files/objects could not be copied/removed.\n')
  # Failing gsutil run whose error count exceeds the tolerated threshold.
  self.mock.run_gsutil.return_value = new_process.ProcessResult(
      command=['/fake'],
      return_code=1,
      output=output,
      time_executed=10.0,
      timed_out=False)

  corpus = corpus_manager.GcsCorpus('bucket')
  # Even without a timeout, too many errors means the sync failed.
  self.assertFalse(corpus.rsync_to_disk('/dir', timeout=60))
def __init__(self, fuzz_target, cross_pollinate_fuzzers):
  """Set up the pruning context for a fuzz target.

  Creates the temporary corpus directories and the GCS corpus objects
  (main, quarantine, shared) used during corpus pruning.

  Args:
    fuzz_target: The fuzz target being pruned; its engine and
      project-qualified name drive directory and corpus naming.
    cross_pollinate_fuzzers: Fuzzers whose corpora are cross-pollinated in.

  Raises:
    CorpusPruningException: If the target's engine cannot be resolved.
  """
  self.fuzz_target = fuzz_target
  self.cross_pollinate_fuzzers = cross_pollinate_fuzzers
  self.merge_tmp_dir = None
  self.engine = engine.get(self.fuzz_target.engine)
  if not self.engine:
    # Bug fix: the message previously interpolated the `engine` module
    # object instead of the engine name that failed to resolve.
    raise CorpusPruningException(
        'Engine {} not found'.format(self.fuzz_target.engine))

  self._created_directories = []

  # Set up temporary directories where corpora will be synced to.
  # Initial synced corpus.
  self.initial_corpus_path = self._create_temp_corpus_directory(
      '%s_initial_corpus' % self.fuzz_target.project_qualified_name())
  # Minimized corpus.
  self.minimized_corpus_path = self._create_temp_corpus_directory(
      '%s_minimized_corpus' % self.fuzz_target.project_qualified_name())
  # Synced quarantine corpus.
  self.quarantine_corpus_path = self._create_temp_corpus_directory(
      '%s_quarantine' % self.fuzz_target.project_qualified_name())
  # Synced shared corpus.
  self.shared_corpus_path = self._create_temp_corpus_directory(
      '%s_shared' % self.fuzz_target.project_qualified_name())
  # Bad units.
  self.bad_units_path = self._create_temp_corpus_directory(
      '%s_bad_units' % self.fuzz_target.project_qualified_name())
  self.merge_tmp_dir = self._create_temp_corpus_directory('merge_workdir')

  # Main corpus plus a quarantine corpus for bad units.
  self.corpus = corpus_manager.FuzzTargetCorpus(
      self.fuzz_target.engine, self.fuzz_target.project_qualified_name())
  self.quarantine_corpus = corpus_manager.FuzzTargetCorpus(
      self.fuzz_target.engine,
      self.fuzz_target.project_qualified_name(),
      quarantine=True)

  shared_corpus_bucket = environment.get_value('SHARED_CORPUS_BUCKET')
  self.shared_corpus = corpus_manager.GcsCorpus(shared_corpus_bucket)
def __init__(self, fuzz_target, cross_pollinate_fuzzers, use_minijail):
  """Set up corpus directories and GCS corpora for pruning a fuzz target."""
  self.fuzz_target = fuzz_target
  self.cross_pollinate_fuzzers = cross_pollinate_fuzzers
  self.use_minijail = use_minijail
  self.merge_tmp_dir = None
  self._created_directories = []

  qualified_name = self.fuzz_target.project_qualified_name()
  make_dir = self._create_temp_corpus_directory

  # Temporary directories that the various corpora get synced into.
  self.initial_corpus_path = make_dir('%s_initial_corpus' % qualified_name)
  self.minimized_corpus_path = make_dir(
      '%s_minimized_corpus' % qualified_name)
  self.quarantine_corpus_path = make_dir('%s_quarantine' % qualified_name)
  self.shared_corpus_path = make_dir('%s_shared' % qualified_name)
  self.bad_units_path = make_dir('%s_bad_units' % qualified_name)

  if not self.use_minijail:
    # Only create a merge working directory when not running in minijail.
    self.merge_tmp_dir = make_dir('merge_workdir')

  # Main corpus plus a quarantine corpus for bad units.
  self.corpus = corpus_manager.FuzzTargetCorpus(self.fuzz_target.engine,
                                                qualified_name)
  self.quarantine_corpus = corpus_manager.FuzzTargetCorpus(
      self.fuzz_target.engine, qualified_name, quarantine=True)

  self.shared_corpus = corpus_manager.GcsCorpus(
      environment.get_value('SHARED_CORPUS_BUCKET'))