Example #1
    def test_rsync_error_below_threshold(self):
        """Test rsync returning errors (but they're below threshold)."""
        output = (
            'blah\n'
            'blah\n'
            'CommandException: 10 files/objects could not be copied/removed.\n'
        )

        self.mock._count_corpus_files.return_value = 10  # pylint: disable=protected-access
        self.mock.run_gsutil.return_value = new_process.ProcessResult(
            command=['/fake'],
            return_code=1,
            output=output,
            time_executed=10.0,
            timed_out=False,
        )

        corpus = corpus_manager.GcsCorpus('bucket')
        self.assertTrue(corpus.rsync_to_disk('/dir', timeout=60))

        self.mock.run_gsutil.return_value = new_process.ProcessResult(
            command=['/fake'],
            return_code=1,
            output=output,
            time_executed=30.0,
            timed_out=True,
        )
        self.assertFalse(corpus.rsync_to_disk('/dir', timeout=60))
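Note: this test and the matching above-threshold test later in this list exercise parsing of the error count that gsutil reports. A minimal sketch of that kind of check, using a hypothetical threshold constant and regex (names are illustrative, not the actual corpus_manager implementation):

import re

# Hypothetical names; the real corpus_manager code may differ.
MAX_SYNC_ERRORS = 10
_SYNC_ERROR_REGEX = re.compile(
    r'CommandException:\s*(\d+) files?/objects? could not be copied/removed')


def sync_errors_acceptable(gsutil_result, max_errors=MAX_SYNC_ERRORS):
  """Return True if a failed rsync only hit a tolerable number of errors."""
  if gsutil_result.timed_out:
    return False
  match = _SYNC_ERROR_REGEX.search(gsutil_result.output)
  return bool(match) and int(match.group(1)) <= max_errors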
Example #2
  def run_single_testcase(self,
                          testcase_path,
                          timeout=None,
                          additional_args=None):
    """Run a single testcase."""
    self._test_ssh()

    # We need to push the testcase to the device and pass in the name.
    testcase_path_name = os.path.basename(os.path.normpath(testcase_path))
    self.device.store(testcase_path, self.fuzzer.data_path())

    # TODO(flowerhack): Pass libfuzzer args (additional_args) here.
    # Guard against the default None so the list concatenation below works.
    additional_args = additional_args or []
    return_code = self.fuzzer.start(['repro', 'data/' + testcase_path_name] +
                                    additional_args)
    self.fuzzer.monitor(return_code)

    with open(self.fuzzer.logfile) as logfile:
      symbolized_output = logfile.read()

    fuzzer_process_result = new_process.ProcessResult()
    fuzzer_process_result.return_code = 0
    fuzzer_process_result.output = symbolized_output
    fuzzer_process_result.time_executed = 0
    fuzzer_process_result.command = self.fuzzer.last_fuzz_cmd
    return fuzzer_process_result
Example #3
  def minimize_crash(self,
                     testcase_path,
                     output_path,
                     timeout,
                     artifact_prefix=None,
                     additional_args=None):
    # Minimization is not performed here; return an empty result.
    return new_process.ProcessResult()
Example #4
    def test_run_and_wait_none_env(self):
        """Test RemoteProcessRunner.run_and_wait() with a None env."""
        process_result = new_process.ProcessResult(['command', '123'], 0,
                                                   b'output', 60.0, False)

        self.mock.stub().RunAndWait.return_value = (
            untrusted_runner_pb2.RunAndWaitResponse(
                result=remote_process.process_result_to_proto(process_result)))

        runner = remote_process_host.RemoteProcessRunner(
            '/executable', ['-default_arg'])
        result = runner.run_and_wait()
        result = remote_process_host.process_result_from_proto(result)

        request = self.mock.stub().RunAndWait.call_args[0][0]
        self.assertEqual('/executable', request.executable_path)
        self.assertEqual(['-default_arg'], request.default_args)
        self.assertEqual([], request.additional_args)
        self.assertFalse(request.HasField('timeout'))
        self.assertFalse(request.terminate_before_kill)
        self.assertFalse(request.HasField('terminate_wait_time'))
        self.assertFalse(request.HasField('input_data'))
        self.assertFalse(request.popen_args.HasField('bufsize'))
        self.assertFalse(request.popen_args.HasField('executable'))
        self.assertFalse(request.popen_args.HasField('shell'))
        self.assertFalse(request.popen_args.HasField('cwd'))
        self.assertEqual({}, request.popen_args.env)
        self.assertFalse(request.popen_args.env_is_set)

        self.assertEqual(result.command, process_result.command)
        self.assertEqual(result.return_code, process_result.return_code)
        self.assertEqual(result.output, process_result.output)
        self.assertEqual(result.time_executed, process_result.time_executed)
        self.assertEqual(result.timed_out, process_result.timed_out)
Example #5
def process_result_from_proto(process_result_proto):
    """Convert ProcessResult proto to new_process.ProcessResult."""
    return new_process.ProcessResult(process_result_proto.command,
                                     process_result_proto.return_code,
                                     process_result_proto.output,
                                     process_result_proto.time_executed,
                                     process_result_proto.timed_out)
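Several of the remote-process tests in this list also call remote_process.process_result_to_proto. A hedged sketch of that inverse conversion, assuming the proto message mirrors the same five fields (the message and field names here are assumptions):

def process_result_to_proto(process_result):
  """Convert a new_process.ProcessResult to its proto representation."""
  # Assumes an untrusted_runner_pb2.ProcessResult message that mirrors the
  # Python class; the real message name may differ.
  result_proto = untrusted_runner_pb2.ProcessResult(
      return_code=process_result.return_code,
      output=process_result.output,
      time_executed=process_result.time_executed,
      timed_out=process_result.timed_out)
  result_proto.command.extend(process_result.command)
  return result_proto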
Example #6
  def fuzz(self,
           corpus_directories,
           fuzz_timeout,
           artifact_prefix=None,
           additional_args=None,
           extra_env=None):
    """LibFuzzerCommon.fuzz override."""
    self._test_ssh()

    # TODO(flowerhack): Pass libfuzzer args (additional_args) here.
    return_code = self.fuzzer.start(additional_args)
    self.fuzzer.monitor(return_code)
    self.process_logs_and_crash(artifact_prefix)

    with open(self.fuzzer.logfile) as logfile:
      symbolized_output = logfile.read()

    # TODO(flowerhack): Would be nice if we could figure out a way to make
    # the "fuzzer start" code return its own ProcessResult. For now, we simply
    # craft one by hand here.
    fuzzer_process_result = new_process.ProcessResult()
    fuzzer_process_result.return_code = 0
    fuzzer_process_result.output = symbolized_output
    fuzzer_process_result.time_executed = 0
    fuzzer_process_result.command = self.fuzzer.last_fuzz_cmd
    return fuzzer_process_result
Example #7
    def mock_merge(*args, **kwargs):  # pylint: disable=unused-argument
      """Mock merge."""
      self.fs.create_file('/fuzz-inputs/temp-9001/merge-corpus/A')
      return new_process.ProcessResult(command='merge-command',
                                       return_code=0,
                                       output='merge',
                                       time_executed=2.0,
                                       timed_out=False)
Example #8
    def mock_fuzz(*args, **kwargs):  # pylint: disable=unused-argument
      """Mock fuzz."""
      self.fs.create_file('/fuzz-inputs/temp-9001/new/A')
      self.fs.create_file('/fuzz-inputs/temp-9001/new/B')
      return new_process.ProcessResult(command='command',
                                       return_code=0,
                                       output=fuzz_output,
                                       time_executed=2.0,
                                       timed_out=False)
Example #9
  def setUp(self):
    super(AflRunnerTest, self).setUp()
    test_helpers.patch_environ(self)
    environment.set_value('HARD_TIMEOUT_OVERRIDE', 600)
    config = launcher.AflConfig.from_target_path(self.TARGET_PATH)
    self.runner = launcher.AflRunner(self.TARGET_PATH, config,
                                     self.TESTCASE_FILE_PATH, self.INPUT_DIR)

    self.fuzz_result = new_process.ProcessResult()
    self.args = ['-iinput1', '-ooutput', '123', '456']
    self.times_called = 0
Example #10
    def setUp(self):
        test_helpers.patch_environ(self)

        self.fuzzer_name = 'fake_fuzzer'
        self.job_type = 'fake_job'
        self.dataset_name = 'fake_dataset'
        self.run_name = 'fake_run'
        self.home_dir = train_gradientfuzz_task.GRADIENTFUZZ_SCRIPTS_DIR
        self.models_dir = os.path.join(self.home_dir, constants.MODEL_DIR)
        self.data_dir = os.path.join(self.home_dir, constants.DATASET_DIR)
        self.temp_dir = tempfile.mkdtemp()
        self.binary_path = os.path.join(GRADIENTFUZZ_TESTING_DIR,
                                        TESTING_BINARY)

        os.environ['FUZZ_INPUTS_DISK'] = self.temp_dir
        os.environ['GRADIENTFUZZ_TESTING'] = str(True)

        test_helpers.patch(self, [
            'bot.tasks.ml_train_utils.get_corpus',
            'bot.tasks.train_gradientfuzz_task.gen_inputs_labels',
            'bot.tasks.train_gradientfuzz_task.train_gradientfuzz',
            'bot.tasks.train_gradientfuzz_task.upload_model_to_gcs',
            'build_management.build_manager.setup_build',
        ])

        self.mock.get_corpus.return_value = True
        self.mock.gen_inputs_labels.return_value = new_process.ProcessResult(
            return_code=0), self.dataset_name
        self.mock.train_gradientfuzz.return_value = new_process.ProcessResult(
            return_code=0), self.run_name
        self.mock.upload_model_to_gcs.return_value = True
        self.mock.setup_build.side_effect = self.mock_build_manager

        # Fakes creating directory tree.
        self.fake_dataset_dir = os.path.join(self.data_dir, self.dataset_name)
        self.fake_model_dir = os.path.join(
            self.models_dir, constants.NEUZZ_ONE_HIDDEN_LAYER_MODEL,
            self.run_name)
        os.makedirs(self.fake_dataset_dir)
        os.makedirs(self.fake_model_dir)
Example #11
    def test_run_and_wait_empty_env(self, mock_run_and_wait):
        """Test remote_process.run_and_wait() with an empty env."""
        process_result = new_process.ProcessResult(['command', '123'], 0,
                                                   'output', 60.0, False)

        mock_run_and_wait.return_value = process_result

        request = untrusted_runner_pb2.RunAndWaitRequest()
        request.executable_path = '/path'
        request.popen_args.env_is_set = True

        remote_process.run_and_wait(request, None)
        mock_run_and_wait.assert_called_with(additional_args=[], env={})
Example #12
    def mock_merge(*args, **kwargs):  # pylint: disable=unused-argument
      """Mock merge."""
      mock_merge_calls.append(self.mock.merge.mock_calls[-1])
      self.assertTrue(len(mock_merge_calls) <= 2)

      merge_output_file = 'merge_step_%d.txt' % len(mock_merge_calls)
      with open(os.path.join(TEST_DIR, merge_output_file)) as f:
        merge_output = f.read()

      self.fs.create_file('/fuzz-inputs/temp-9001/merge-corpus/A')
      return new_process.ProcessResult(
          command='merge-command',
          return_code=0,
          output=merge_output,
          time_executed=2.0,
          timed_out=False)
Example #13
    def setUp(self):
        super().setUp()
        test_helpers.patch_environ(self)
        test_helpers.patch(self,
                           ['bot.fuzzers.engine_common.is_lpm_fuzz_target'])
        self.mock.is_lpm_fuzz_target.return_value = True
        environment.set_value('HARD_TIMEOUT_OVERRIDE', 600)
        config = launcher.AflConfig.from_target_path(self.TARGET_PATH)

        self.runner = launcher.AflRunner(self.TARGET_PATH, config,
                                         self.TESTCASE_FILE_PATH,
                                         self.INPUT_DIR)

        self.fuzz_result = new_process.ProcessResult()
        self.args = ['-iinput1', '-ooutput', '123', '456']
        self.times_called = 0
Example #14
  def fuzz(self,
           corpus_directories,
           fuzz_timeout,
           artifact_prefix=None,
           additional_args=None,
           extra_env=None):
    """LibFuzzerCommon.fuzz override."""
    self._test_qemu_ssh()
    self.fuzzer.run([])
    # TODO(flowerhack): Modify fuzzer.run() to return a ProcessResult, rather
    # than artisanally handcrafting one here.
    fuzzer_process_result = new_process.ProcessResult()
    fuzzer_process_result.return_code = 0
    fuzzer_process_result.output = ''
    fuzzer_process_result.time_executed = 0
    fuzzer_process_result.command = self.fuzzer.last_fuzz_cmd
    return fuzzer_process_result
Example #15
    def fuzz(self,
             corpus_directories,
             fuzz_timeout,
             artifact_prefix=None,
             additional_args=None,
             extra_env=None):
        """LibFuzzerCommon.fuzz override."""
        additional_args = copy.copy(additional_args)
        if additional_args is None:
            additional_args = []

        self._test_ssh()
        self._push_corpora_from_host_to_target(corpus_directories)

        max_total_time = self.get_max_total_time(fuzz_timeout)
        if any(arg.startswith(constants.FORK_FLAG) for arg in additional_args):
            max_total_time -= self.LIBFUZZER_FORK_MODE_CLEAN_EXIT_TIME
        assert max_total_time > 0

        additional_args.extend([
            '%s%d' % (constants.MAX_TOTAL_TIME_FLAG, max_total_time),
            constants.PRINT_FINAL_STATS_ARGUMENT,
        ])

        # Run the fuzzer.
        # TODO: actually we want new_corpus_relative_dir_target for *each* corpus
        return_code = self.fuzzer.start(
            self._corpus_directories_libfuzzer(corpus_directories) +
            additional_args)
        self.fuzzer.monitor(return_code)
        self.process_logs_and_crash(artifact_prefix)
        with open(self.fuzzer.logfile) as logfile:
            symbolized_output = logfile.read()

        self._pull_new_corpus_from_target_to_host(corpus_directories)
        self._clear_all_target_corpora()

        # TODO(flowerhack): Would be nice if we could figure out a way to make
        # the "fuzzer start" code return its own ProcessResult. For now, we simply
        # craft one by hand here.
        fuzzer_process_result = new_process.ProcessResult()
        fuzzer_process_result.return_code = 0
        fuzzer_process_result.output = symbolized_output
        fuzzer_process_result.time_executed = 0
        fuzzer_process_result.command = self.fuzzer.last_fuzz_cmd
        return fuzzer_process_result
Example #16
  def test_rsync_error_above_threshold(self):
    """Test rsync returning errors (above threshold)."""
    output = (
        'blah\n'
        'blah\n'
        'CommandException: 11 files/objects could not be copied/removed.\n')

    self.mock.run_gsutil.return_value = new_process.ProcessResult(
        command=['/fake'],
        return_code=1,
        output=output,
        time_executed=10.0,
        timed_out=False,
    )

    corpus = corpus_manager.GcsCorpus('bucket')
    self.assertFalse(corpus.rsync_to_disk('/dir', timeout=60))
Example #17
    def _process_result(self,
                        command=None,
                        output=None,
                        return_code=0,
                        time_executed=20,
                        timed_out=False):
        """Creates a new_process.ProcessResult with specified values or good
    defaults."""
        if command is None:
            command = ['afl-fuzz', '-iin', '-oout', './fuzzer']
        if output is None:
            output = self.ARBITRARY_OUTPUT

        return new_process.ProcessResult(command=command,
                                         output=output,
                                         return_code=return_code,
                                         time_executed=time_executed,
                                         timed_out=timed_out)
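A hypothetical use of this helper inside a test of the same class, overriding only the field under test (the mocked runner attribute is illustrative):

        # Hypothetical usage; self.mock.run_and_wait is an assumed patched target.
        self.mock.run_and_wait.return_value = self._process_result(return_code=1)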
Example #18
  def setUp(self):
    test_helpers.patch_environ(self)

    self.fuzzer_name = 'fake_fuzzer'
    self.job_type = 'fake_job'
    self.temp_dir = tempfile.mkdtemp()

    os.environ['FUZZ_INPUTS_DISK'] = self.temp_dir

    test_helpers.patch(self, [
        'bot.tasks.ml_train_utils.get_corpus',
        'bot.tasks.train_rnn_generator_task.train_rnn',
        'bot.tasks.train_rnn_generator_task.upload_model_to_gcs',
    ])

    self.mock.get_corpus.return_value = True
    self.mock.train_rnn.return_value = new_process.ProcessResult(return_code=0)
    self.mock.upload_model_to_gcs.return_value = True
Example #19
    def test_run_and_wait(self):
        """Test RemoteProcessRunner.run_and_wait()."""
        process_result = new_process.ProcessResult(['command', '123'], 0,
                                                   b'output', 60.0, False)

        self.mock.stub().RunAndWait.return_value = (
            untrusted_runner_pb2.RunAndWaitResponse(
                result=remote_process.process_result_to_proto(process_result)))

        runner = remote_process_host.RemoteProcessRunner(
            '/executable', ['-default_arg'])
        result = runner.run_and_wait(['-additional_arg'],
                                     100.0,
                                     True,
                                     10.0,
                                     b'input',
                                     shell=True,
                                     env={'ASAN_OPTIONS': 'asan_options'},
                                     cwd='/',
                                     max_stdout_len=1337)
        result = remote_process_host.process_result_from_proto(result)

        request = self.mock.stub().RunAndWait.call_args[0][0]
        self.assertEqual('/executable', request.executable_path)
        self.assertEqual(['-default_arg'], request.default_args)
        self.assertEqual(['-additional_arg'], request.additional_args)
        self.assertEqual(100.0, request.timeout)
        self.assertTrue(request.terminate_before_kill)
        self.assertEqual(10.0, request.terminate_wait_time)
        self.assertEqual(b'input', request.input_data)
        self.assertFalse(request.popen_args.HasField('bufsize'))
        self.assertFalse(request.popen_args.HasField('executable'))
        self.assertTrue(request.popen_args.shell)
        self.assertEqual('/', request.popen_args.cwd)
        self.assertEqual({'ASAN_OPTIONS': 'asan_options'},
                         request.popen_args.env)
        self.assertTrue(request.popen_args.env_is_set)
        self.assertEqual(1337, request.max_stdout_len)

        self.assertEqual(result.command, process_result.command)
        self.assertEqual(result.return_code, process_result.return_code)
        self.assertEqual(result.output, process_result.output)
        self.assertEqual(result.time_executed, process_result.time_executed)
        self.assertEqual(result.timed_out, process_result.timed_out)
Example #20
def train_gradientfuzz(fuzzer_name, dataset_name, num_inputs):
  """Train GradientFuzz model.

  Args:
    fuzzer_name (str): Prefix to --run-name flag.
    dataset_name (str): Inputs/labels stored under
        GRADIENTFUZZ_SCRIPTS_DIR/data/[dataset_name].
    num_inputs (int): Number of input files (for val split/batch size).

  Returns:
    (new_process.ProcessResult): Result of `run_and_wait()`.
    (str): Run name (results stored under
        GRADIENTFUZZ_SCRIPTS_DIR/models/[architecture]/[run_name]).
  """
  if num_inputs < run_constants.MIN_NUM_INPUTS:
    return new_process.ProcessResult(
        return_code=run_constants.ExitCode.CORPUS_TOO_SMALL), None

  batch_size = os.environ.get(
      'GRADIENTFUZZ_BATCH_SIZE', default=min(32, int(num_inputs * 0.4)))
  val_batch_size = os.environ.get(
      'GRADIENTFUZZ_VAL_BATCH_SIZE', default=min(32, int(num_inputs * 0.1)))
  num_epochs = os.environ.get(
      'GRADIENTFUZZ_NUM_EPOCHS', default=run_constants.NUM_EPOCHS)

  script_path = get_script_path(run_constants.TRAIN_MODEL_SCRIPT)
  run_name = fuzzer_name + run_constants.RUN_NAME_SUFFIX
  args_list = [
      script_path, run_constants.RUN_NAME_FLAG, run_name,
      run_constants.DATASET_NAME_FLAG, dataset_name, run_constants.EPOCHS_FLAG,
      str(num_epochs), run_constants.BATCH_SIZE_FLAG,
      str(batch_size), run_constants.VAL_BATCH_SIZE_FLAG,
      str(val_batch_size), run_constants.ARCHITECTURE_FLAG,
      constants.NEUZZ_ONE_HIDDEN_LAYER_MODEL
  ]

  logs.log(f'Launching training with the following arguments: "{args_list}".')

  # Run process in gradientfuzz directory.
  gradientfuzz_trainer = new_process.ProcessRunner(sys.executable)
  return gradientfuzz_trainer.run_and_wait(
      args_list,
      cwd=GRADIENTFUZZ_SCRIPTS_DIR,
      timeout=run_constants.TRAIN_TIMEOUT), run_name
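A hypothetical call site for this function, unpacking the (result, run_name) tuple it returns (the error handling shown is illustrative, not the actual task code):

# Hypothetical call site; variable names and error handling are illustrative.
result, run_name = train_gradientfuzz(fuzzer_name, dataset_name, num_inputs)
if result.return_code != 0:
  logs.log_error('GradientFuzz training failed.')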
Example #21
def mocked_fuzz(runner):
  """Mocked version of AflRunner.fuzz."""
  fuzz_args = runner.generate_afl_args()

  runner._fuzz_args = fuzz_args  # pylint: disable=protected-access
  engine_common.recreate_directory(runner.afl_output.output_directory)
  runner._fuzzer_stderr = ''  # pylint: disable=protected-access

  # Create the queue directory within AFL's output directory.
  queue = runner.afl_output.queue
  engine_common.recreate_directory(queue)
  new_corpus_dir = os.path.join(DATA_DIRECTORY, 'merge_new_corpus')
  for filename in os.listdir(new_corpus_dir):
    src = os.path.join(new_corpus_dir, filename)
    dst = os.path.join(queue, filename)
    shutil.copy(src, dst)

  return new_process.ProcessResult(
      command=[], return_code=0, output='', time_executed=1)
Example #22
    def test_run_and_wait(self, mock_run_and_wait):
        """Test remote_process.run_and_wait()."""
        process_result = new_process.ProcessResult(['command', '123'], 0,
                                                   'output', 60.0, False)

        mock_run_and_wait.return_value = process_result

        request = untrusted_runner_pb2.RunAndWaitRequest()
        request.executable_path = '/path'
        request.default_args.extend(['-default_arg'])
        request.additional_args.extend(['-additional_arg'])
        request.timeout = 100.0
        request.terminate_before_kill = True
        request.terminate_wait_time = 10.0
        request.input_data = 'input'
        request.max_stdout_len = 1337
        request.popen_args.shell = True
        request.popen_args.env.update({'VAR': 'VAL'})
        request.popen_args.env_is_set = True
        request.popen_args.cwd = '/'

        response = remote_process.run_and_wait(request, None)
        result = remote_process_host.process_result_from_proto(response.result)

        mock_run_and_wait.assert_called_with(
            additional_args=['-additional_arg'],
            timeout=100.0,
            terminate_before_kill=True,
            terminate_wait_time=10.0,
            input_data='input',
            max_stdout_len=1337,
            cwd='/',
            env={'VAR': 'VAL'},
            shell=True)

        self.assertEqual(result.command, process_result.command)
        self.assertEqual(result.return_code, process_result.return_code)
        self.assertEqual(result.output, process_result.output)
        self.assertEqual(result.time_executed, process_result.time_executed)
        self.assertEqual(result.timed_out, process_result.timed_out)
Example #23
    def merge(self,
              corpus_directories,
              merge_timeout,
              artifact_prefix=None,
              tmp_dir=None,
              additional_args=None):
        """Merge the given corpus directories."""
        # TODO(flowerhack): Integrate some notion of a merge timeout.
        # Guard against the default None so the list concatenation below works.
        additional_args = additional_args or []
        self._push_corpora_from_host_to_target(corpus_directories)

        # Run merge.
        _, _ = self.fuzzer.merge(
            self._corpus_directories_libfuzzer(corpus_directories) +
            additional_args)

        self._pull_new_corpus_from_target_to_host(corpus_directories)
        self._clear_all_target_corpora()

        merge_result = new_process.ProcessResult()
        merge_result.return_code = 0
        merge_result.timed_out = False
        merge_result.output = ''
        merge_result.time_executed = 0
        merge_result.command = ''
        return merge_result