    def test_rsync_error_below_threshold_with_not_found_errors(self):
        """Test rsync returning errors whose overall count exceeds the threshold,
        but drops below it once 'not found' errors are excluded."""
        output = (
            b'blah\n' + b'[Errno 2] No such file or directory\n' * 10 +
            b'NotFoundException: 404 gs://bucket/file001 does not exist.\n' *
            180 +
            b'CommandException: 200 files/objects could not be copied/removed.\n'
        )

        self.mock._count_corpus_files.return_value = 10  # pylint: disable=protected-access
        self.mock.run_gsutil.return_value = new_process.ProcessResult(
            command=['/fake'],
            return_code=1,
            output=output,
            time_executed=10.0,
            timed_out=False,
        )

        corpus = corpus_manager.GcsCorpus('bucket')
        self.assertTrue(corpus.rsync_to_disk('/dir', timeout=60))

        self.mock.run_gsutil.return_value = new_process.ProcessResult(
            command=['/fake'],
            return_code=1,
            output=output,
            time_executed=30.0,
            timed_out=True,
        )
        self.assertFalse(corpus.rsync_to_disk('/dir', timeout=60))

    def test_rsync_error_below_threshold(self):
        """Test rsync returning errors (but they're below threshold)."""
        output = (
            b'blah\n'
            b'blah\n'
            b'CommandException: 10 files/objects could not be copied/removed.\n'
        )

        self.mock._count_corpus_files.return_value = 10  # pylint: disable=protected-access
        self.mock.run_gsutil.return_value = new_process.ProcessResult(
            command=['/fake'],
            return_code=1,
            output=output,
            time_executed=10.0,
            timed_out=False,
        )

        corpus = corpus_manager.GcsCorpus('bucket')
        self.assertTrue(corpus.rsync_to_disk('/dir', timeout=60))

        self.mock.run_gsutil.return_value = new_process.ProcessResult(
            command=['/fake'],
            return_code=1,
            output=output,
            time_executed=30.0,
            timed_out=True,
        )
        self.assertFalse(corpus.rsync_to_disk('/dir', timeout=60))

Example #3

    def test_run_and_wait_none_env(self):
        """Test RemoteProcessRunner.run_and_wait() with a None env."""
        process_result = new_process.ProcessResult(['command', '123'], 0,
                                                   b'output', 60.0, False)

        self.mock.stub().RunAndWait.return_value = (
            untrusted_runner_pb2.RunAndWaitResponse(
                result=remote_process.process_result_to_proto(process_result)))

        runner = remote_process_host.RemoteProcessRunner(
            '/executable', ['-default_arg'])
        result = runner.run_and_wait()
        result = remote_process_host.process_result_from_proto(result)

        request = self.mock.stub().RunAndWait.call_args[0][0]
        self.assertEqual('/executable', request.executable_path)
        self.assertEqual(['-default_arg'], request.default_args)
        self.assertEqual([], request.additional_args)
        self.assertFalse(request.HasField('timeout'))
        self.assertFalse(request.terminate_before_kill)
        self.assertFalse(request.HasField('terminate_wait_time'))
        self.assertFalse(request.HasField('input_data'))
        self.assertFalse(request.popen_args.HasField('bufsize'))
        self.assertFalse(request.popen_args.HasField('executable'))
        self.assertFalse(request.popen_args.HasField('shell'))
        self.assertFalse(request.popen_args.HasField('cwd'))
        self.assertEqual({}, request.popen_args.env)
        self.assertFalse(request.popen_args.env_is_set)

        self.assertEqual(result.command, process_result.command)
        self.assertEqual(result.return_code, process_result.return_code)
        self.assertEqual(result.output, process_result.output)
        self.assertEqual(result.time_executed, process_result.time_executed)
        self.assertEqual(result.timed_out, process_result.timed_out)

Example #4

def process_result_from_proto(process_result_proto):
    """Convert ProcessResult proto to new_process.ProcessResult."""
    return new_process.ProcessResult(process_result_proto.command,
                                     process_result_proto.return_code,
                                     process_result_proto.output,
                                     process_result_proto.time_executed,
                                     process_result_proto.timed_out)
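
A brief usage sketch for this converter (illustrative only: `stub`, `request`, and the surrounding setup are assumptions, mirroring how the RunAndWait tests elsewhere in this collection consume the RPC response):

# Hypothetical call site: the RunAndWait RPC returns a response whose `result`
# field is a ProcessResult proto; the host side converts it back into a
# new_process.ProcessResult before inspecting it.
response = stub.RunAndWait(request)
result = process_result_from_proto(response.result)
if result.timed_out or result.return_code != 0:
    print('Remote process failed:', result.output)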

Example #5

    def test_run_and_wait_empty_env(self, mock_run_and_wait):
        """Test remote_process.run_and_wait() with an empty env."""
        process_result = new_process.ProcessResult(['command', '123'], 0,
                                                   b'output', 60.0, False)

        mock_run_and_wait.return_value = process_result

        request = untrusted_runner_pb2.RunAndWaitRequest()
        request.executable_path = '/path'
        request.popen_args.env_is_set = True

        remote_process.run_and_wait(request, None)
        mock_run_and_wait.assert_called_with(additional_args=[], env={})

Example #6

    def run_and_loop(self,
                     *args,
                     timeout=None,
                     **kwargs) -> new_process.ProcessResult:
        """Adds looping call to run_and_wait method.

        This method starts a LoopingTimer() that continuously executes a
        function to fetch and save rawcover data from Syzkaller.

        Args:
          *args: args for self.run()
          timeout: timeout in seconds to stop Syzkaller
          **kwargs: kwargs for self.run()

        Returns:
          new_process.ProcessResult from Syzkaller
        """
        process = self.run(*args, **kwargs)
        pid = process.popen.pid
        logs.log(f'Syzkaller pid = {pid}')

        looping_timer = LoopingTimer(
            RAWCOVER_RETRIEVE_INTERVAL,
            self.save_rawcover_output,
            args=[pid],
        )
        looping_timer.start()

        try:
            if not timeout:
                start_time = time.time()
                output = process.communicate()[0]
                return new_process.ProcessResult(process.command,
                                                 process.poll(), output,
                                                 time.time() - start_time,
                                                 False)

            result = new_process.wait_process(
                process,
                timeout=timeout,
                input_data=None,
                terminate_before_kill=False,
                terminate_wait_time=None,
            )
            result.command = process.command
            result.output = str(result.output, 'utf-8')

            return result
        finally:
            looping_timer.cancel()
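
A hypothetical call site for run_and_loop (the `runner` object, `extra_args`, and the one-hour timeout are assumptions; the method returns a new_process.ProcessResult as documented above):

# Illustrative only: run Syzkaller with periodic rawcover collection and a
# fixed time budget, then inspect the resulting ProcessResult.
result = runner.run_and_loop(*extra_args, timeout=3600)
if result.timed_out:
    logs.log('Syzkaller was stopped after hitting the timeout.')
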
    def setUp(self):
        super().setUp()
        test_helpers.patch_environ(self)
        test_helpers.patch(self, [
            'clusterfuzz._internal.bot.fuzzers.engine_common.is_lpm_fuzz_target'
        ])
        self.mock.is_lpm_fuzz_target.return_value = True
        environment.set_value('HARD_TIMEOUT_OVERRIDE', 600)
        config = launcher.AflConfig.from_target_path(self.TARGET_PATH)

        self.runner = launcher.AflRunner(self.TARGET_PATH, config,
                                         self.TESTCASE_FILE_PATH,
                                         self.INPUT_DIR)

        self.fuzz_result = new_process.ProcessResult()
        self.args = ['-iinput1', '-ooutput', '123', '456']
        self.times_called = 0

    def _process_result(self,
                        command=None,
                        output=None,
                        return_code=0,
                        time_executed=20,
                        timed_out=False):
        """Creates a new_process.ProcessResult with specified values or good
        defaults."""
        if command is None:
            command = ['afl-fuzz', '-iin', '-oout', './fuzzer']
        if output is None:
            output = self.ARBITRARY_OUTPUT

        return new_process.ProcessResult(command=command,
                                         output=output,
                                         return_code=return_code,
                                         time_executed=time_executed,
                                         timed_out=timed_out)
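
A short hypothetical usage of this helper inside a test method of the same class: override only the fields under test and fall back to the defaults for everything else.

# Hypothetical call: only return_code and timed_out deviate from the
# helper's defaults.
timed_out_result = self._process_result(return_code=1, timed_out=True)
self.assertTrue(timed_out_result.timed_out)
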
    def test_rsync_error_above_threshold(self):
        """Test rsync returning errors (above threshold)."""
        output = (
            b'blah\n'
            b'blah\n'
            b'CommandException: 11 files/objects could not be copied/removed.\n'
        )

        self.mock.run_gsutil.return_value = new_process.ProcessResult(
            command=['/fake'],
            return_code=1,
            output=output,
            time_executed=10.0,
            timed_out=False,
        )

        corpus = corpus_manager.GcsCorpus('bucket')
        self.assertFalse(corpus.rsync_to_disk('/dir', timeout=60))
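
Taken together, the rsync threshold tests above imply that rsync_to_disk parses the trailing CommandException count, discounts 'not found' errors (expected when files are deleted mid-sync), tolerates the remainder only up to a threshold, and still fails on a timeout. A minimal illustrative sketch of that kind of check follows; it is not ClusterFuzz's actual implementation, and MAX_SYNC_ERRORS, the helper name, and the exact regex are assumptions:

import re

# Hypothetical threshold for illustration only; the real value and logic live
# in clusterfuzz's corpus_manager and may differ in detail.
MAX_SYNC_ERRORS = 10


def sync_error_is_acceptable(gsutil_output, max_errors=MAX_SYNC_ERRORS):
    """Return True if a failed gsutil rsync left few enough real errors."""
    match = re.search(
        rb'CommandException:\s*(\d+) files/objects could not be copied/removed',
        gsutil_output)
    if not match:
        # Failure for some reason other than per-file copy errors.
        return False

    error_count = int(match.group(1))
    # 'Not found' errors do not count against the threshold, as the first
    # test above implies.
    error_count -= gsutil_output.count(b'[Errno 2] No such file or directory')
    error_count -= gsutil_output.count(b'NotFoundException')
    return error_count <= max_errors

The caller would additionally treat a timed-out sync as a failure, which is why both 'below threshold' tests expect False once timed_out is True.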

Example #10

    def test_run_and_wait(self):
        """Test RemoteProcessRunner.run_and_wait()."""
        process_result = new_process.ProcessResult(['command', '123'], 0,
                                                   b'output', 60.0, False)

        self.mock.stub().RunAndWait.return_value = (
            untrusted_runner_pb2.RunAndWaitResponse(
                result=remote_process.process_result_to_proto(process_result)))

        runner = remote_process_host.RemoteProcessRunner(
            '/executable', ['-default_arg'])
        result = runner.run_and_wait(['-additional_arg'],
                                     100.0,
                                     True,
                                     10.0,
                                     b'input',
                                     shell=True,
                                     env={'ASAN_OPTIONS': 'asan_options'},
                                     cwd='/',
                                     max_stdout_len=1337)
        result = remote_process_host.process_result_from_proto(result)

        request = self.mock.stub().RunAndWait.call_args[0][0]
        self.assertEqual('/executable', request.executable_path)
        self.assertEqual(['-default_arg'], request.default_args)
        self.assertEqual(['-additional_arg'], request.additional_args)
        self.assertEqual(100.0, request.timeout)
        self.assertTrue(request.terminate_before_kill)
        self.assertEqual(10.0, request.terminate_wait_time)
        self.assertEqual(b'input', request.input_data)
        self.assertFalse(request.popen_args.HasField('bufsize'))
        self.assertFalse(request.popen_args.HasField('executable'))
        self.assertTrue(request.popen_args.shell)
        self.assertEqual('/', request.popen_args.cwd)
        self.assertEqual({'ASAN_OPTIONS': 'asan_options'},
                         request.popen_args.env)
        self.assertTrue(request.popen_args.env_is_set)
        self.assertEqual(1337, request.max_stdout_len)

        self.assertEqual(result.command, process_result.command)
        self.assertEqual(result.return_code, process_result.return_code)
        self.assertEqual(result.output, process_result.output)
        self.assertEqual(result.time_executed, process_result.time_executed)
        self.assertEqual(result.timed_out, process_result.timed_out)

    def test_repro_successful(self, mock_process_runner):
        """Repro successfully finds crash log."""
        output = SYZ_CRUSH_OUTPUT_TEMPLATE.format(
            reproduce_log=SYZ_CRUSH_LOG_LOCATION,
            crash=1,
        )

        mock_process_runner.run_and_wait = mock.Mock(
            return_value=new_process.ProcessResult(
                command=SYZ_CRUSH_COMMAND,
                return_code=1,
                output=output,
            ))

        with open(f'{TEST_PATH}/reproducer11', 'r') as file:
            actual = self.target.repro(0, [])
            self.assertEqual(actual.command, SYZ_CRUSH_COMMAND)
            self.assertEqual(actual.return_code, 1)
            self.assertEqual(actual.output, file.read())

Example #12

def mocked_fuzz(runner):
    """Mocked version of AflRunner.fuzz."""
    fuzz_args = runner.generate_afl_args()

    runner._fuzz_args = fuzz_args  # pylint: disable=protected-access
    engine_common.recreate_directory(runner.afl_output.output_directory)
    runner._fuzzer_stderr = ''  # pylint: disable=protected-access

    # Create the queue directory within AFL's output directory.
    queue = runner.afl_output.queue
    engine_common.recreate_directory(queue)
    new_corpus_dir = os.path.join(DATA_DIRECTORY, 'merge_new_corpus')
    for filename in os.listdir(new_corpus_dir):
        src = os.path.join(new_corpus_dir, filename)
        dst = os.path.join(queue, filename)
        shutil.copy(src, dst)

    return new_process.ProcessResult(command=[],
                                     return_code=0,
                                     output='',
                                     time_executed=1)

    def test_repro_no_crash(self, mock_process_runner):
        """Repro retries when crash fails to reproduce."""
        output = SYZ_CRUSH_OUTPUT_TEMPLATE.format(
            reproduce_log=SYZ_CRUSH_LOG_LOCATION,
            crash=0,
        )

        mock_process_runner.run_and_wait = mock.Mock(
            return_value=new_process.ProcessResult(
                command=SYZ_CRUSH_COMMAND,
                return_code=1,
                output=output,
            ))

        actual = self.target.repro(0, [])
        self.assertEqual(actual.return_code, 0)

        self.assertEqual(
            mock_process_runner.run_and_wait.call_count,
            REPRO_RETRY_MAX,
        )

Example #14

    def test_run_and_wait(self, mock_run_and_wait):
        """Test remote_process.run_and_wait()."""
        process_result = new_process.ProcessResult(['command', '123'], 0,
                                                   b'output', 60.0, False)

        mock_run_and_wait.return_value = process_result

        request = untrusted_runner_pb2.RunAndWaitRequest()
        request.executable_path = '/path'
        request.default_args.extend(['-default_arg'])
        request.additional_args.extend(['-additional_arg'])
        request.timeout = 100.0
        request.terminate_before_kill = True
        request.terminate_wait_time = 10.0
        request.input_data = b'input'
        request.max_stdout_len = 1337
        request.popen_args.shell = True
        request.popen_args.env.update({'VAR': 'VAL'})
        request.popen_args.env_is_set = True
        request.popen_args.cwd = '/'

        response = remote_process.run_and_wait(request, None)
        result = remote_process_host.process_result_from_proto(response.result)

        mock_run_and_wait.assert_called_with(
            additional_args=['-additional_arg'],
            timeout=100.0,
            terminate_before_kill=True,
            terminate_wait_time=10.0,
            input_data=b'input',
            max_stdout_len=1337,
            cwd='/',
            env={'VAR': 'VAL'},
            shell=True)

        self.assertEqual(result.command, process_result.command)
        self.assertEqual(result.return_code, process_result.return_code)
        self.assertEqual(result.output, process_result.output)
        self.assertEqual(result.time_executed, process_result.time_executed)
        self.assertEqual(result.timed_out, process_result.timed_out)

  def setUp(self):
    test_helpers.patch_environ(self)

    self.fuzzer_name = 'fake_fuzzer'
    self.full_fuzzer_name = 'libFuzzer_fake_fuzzer'
    self.job_type = 'fake_job'
    self.temp_dir = tempfile.mkdtemp()

    data_types.FuzzTarget(
        engine='libFuzzer', binary='fake_fuzzer', project='test-project').put()

    os.environ['FUZZ_INPUTS_DISK'] = self.temp_dir

    test_helpers.patch(self, [
        'clusterfuzz._internal.bot.tasks.ml_train_utils.get_corpus',
        'clusterfuzz._internal.bot.tasks.train_rnn_generator_task.train_rnn',
        'clusterfuzz._internal.bot.tasks.train_rnn_generator_task.upload_model_to_gcs',
    ])

    self.mock.get_corpus.return_value = True
    self.mock.train_rnn.return_value = new_process.ProcessResult(return_code=0)
    self.mock.upload_model_to_gcs.return_value = True