Example #1
def test_using_gsutil(use_gsutil):  # pylint: disable=unused-argument
    """Tests that gsutil is used in Google Cloud running settings."""

    with mock.patch('common.new_process.execute') as mocked_execute:
        filestore_utils.cp(GCS_DIR, GCS_DIR_2, recursive=True)
        assert 'gsutil' in mocked_execute.call_args_list[0][0][0]

    with mock.patch('common.new_process.execute') as mocked_execute:
        filestore_utils.ls(GCS_DIR)
        assert 'gsutil' in mocked_execute.call_args_list[0][0][0]

    with mock.patch('common.new_process.execute') as mocked_execute:
        filestore_utils.rm(GCS_DIR, recursive=True)
        assert 'gsutil' in mocked_execute.call_args_list[0][0][0]

    with mock.patch('common.new_process.execute') as mocked_execute:
        filestore_utils.rsync(GCS_DIR, GCS_DIR_2, recursive=True)
        assert 'gsutil' in mocked_execute.call_args_list[0][0][0]
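
The use_gsutil fixture and the GCS_DIR constants are defined elsewhere in the test module. A minimal sketch of what they might look like, assuming filestore_utils picks gsutil whenever the experiment filestore is a gs:// path; the environment variable name and bucket values are assumptions, not taken from the source:

import pytest

GCS_DIR = 'gs://experiment-bucket/dir-a'    # assumed placeholder value
GCS_DIR_2 = 'gs://experiment-bucket/dir-b'  # assumed placeholder value


@pytest.fixture
def use_gsutil(monkeypatch):
    """Hypothetical fixture: point the experiment filestore at a gs:// path
    so filestore_utils dispatches to gsutil. monkeypatch restores the
    variable after the test."""
    monkeypatch.setenv('EXPERIMENT_FILESTORE', 'gs://experiment-bucket')
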
Example #2
def test_keyword_args(use_gsutil):  # pylint: disable=unused-argument
    """Tests that keyword args, and in particular 'parallel' are handled
    correctly."""

    with mock.patch('common.new_process.execute') as mocked_execute:
        filestore_utils.rm(GCS_DIR_2, recursive=True, parallel=True)
        mocked_execute.assert_called_with(
            ['gsutil', '-m', 'rm', '-r', GCS_DIR_2], expect_zero=True)

    with mock.patch('common.new_process.execute') as mocked_execute:
        mocked_execute.return_value = new_process.ProcessResult(0, '', '')
        filestore_utils.ls(GCS_DIR_2)
        mocked_execute.assert_called_with(['gsutil', 'ls', GCS_DIR_2],
                                          expect_zero=True)

    with mock.patch('common.new_process.execute') as mocked_execute:
        filestore_utils.cp(GCS_DIR, GCS_DIR_2, parallel=True)
        mocked_execute.assert_called_with(
            ['gsutil', '-m', 'cp', GCS_DIR, GCS_DIR_2], expect_zero=True)
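
The assertions above pin down the command layout: parallel=True becomes gsutil's top-level -m flag (gsutil's real parallel-operations option), placed before the subcommand, while recursive=True becomes the subcommand's -r flag. A sketch of a wrapper consistent with those assertions; the helper names here are mine, not the library's:

from common import new_process  # module mocked in the tests above


def _gsutil_command(arguments, parallel=False):
    # gsutil's -m flag is global, so it goes before the subcommand.
    command = ['gsutil']
    if parallel:
        command.append('-m')
    return command + arguments


def rm(path, recursive=False, parallel=False):  # pylint: disable=invalid-name
    """Sketch of an rm wrapper producing exactly the command the test
    asserts: ['gsutil', '-m', 'rm', '-r', path]."""
    arguments = ['rm']
    if recursive:
        arguments.append('-r')
    arguments.append(path)
    return new_process.execute(_gsutil_command(arguments, parallel=parallel),
                               expect_zero=True)
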
Example #3
def test_using_local_filestore(fs, use_local_filestore):  # pylint: disable=invalid-name,unused-argument
    """Tests that local_filestore is used in local running settings."""
    fs.create_dir(LOCAL_DIR)
    fs.create_dir(LOCAL_DIR_2)

    with mock.patch('common.new_process.execute') as mocked_execute:
        filestore_utils.cp(LOCAL_DIR, LOCAL_DIR_2, recursive=True)
        assert 'gsutil' not in mocked_execute.call_args_list[0][0][0]

    with mock.patch('common.new_process.execute') as mocked_execute:
        filestore_utils.ls(LOCAL_DIR)
        assert 'gsutil' not in mocked_execute.call_args_list[0][0][0]

    with mock.patch('common.new_process.execute') as mocked_execute:
        filestore_utils.rm(LOCAL_DIR, recursive=True)
        assert 'gsutil' not in mocked_execute.call_args_list[0][0][0]

    with mock.patch('common.new_process.execute') as mocked_execute:
        filestore_utils.rsync(LOCAL_DIR, LOCAL_DIR_2, recursive=True)
        assert 'gsutil' not in mocked_execute.call_args_list[0][0][0]
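
Here fs is pyfakefs's fixture, which fakes the filesystem so the local directories can be created without touching disk. In local mode the wrapper presumably shells out to ordinary POSIX tools; a sketch of a local cp consistent with the "'gsutil' not in command" assertion, again an assumption about the implementation rather than the source:

from common import new_process  # module mocked in the tests above


def cp(source, destination, recursive=False, parallel=False):  # pylint: disable=invalid-name
    """Hypothetical local-filestore cp: plain POSIX cp instead of gsutil cp."""
    del parallel  # Accepted for API parity with gsutil; no local equivalent.
    command = ['cp']
    if recursive:
        command.append('-r')
    command.extend([source, destination])
    return new_process.execute(command, expect_zero=True)
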
Example #4
def get_fuzzer_covered_regions(benchmark_df, benchmark, fuzzer):
    """Gets the covered regions for |fuzzer| in |benchmark_df| from the json
    file in the bucket."""
    with tempfile.TemporaryDirectory() as temp_dir:
        dst_file = os.path.join(temp_dir, 'tmp.json')
        src_filestore_path = get_fuzzer_filestore_path(benchmark_df, fuzzer)
        src_file = posixpath.join(src_filestore_path, 'coverage', 'data',
                                  benchmark, fuzzer, 'covered_regions.json')
        if filestore_utils.ls(src_file, must_exist=False).retcode:
            # Error occurred; the coverage file does not exist. Bail out.
            return {}

        filestore_utils.cp(src_file, dst_file)
        with open(dst_file) as json_file:
            return json.load(json_file)
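
The ls call with must_exist=False returns a result object instead of raising when the file is missing, so a nonzero retcode doubles as an existence check. A brief usage sketch; the benchmark and fuzzer names are illustrative, and benchmark_df is the experiment DataFrame produced elsewhere in the pipeline:

# Assumed example values; any benchmark/fuzzer pair in benchmark_df works.
covered_regions = get_fuzzer_covered_regions(benchmark_df,
                                             benchmark='libpng-1.2.56',
                                             fuzzer='afl')
if not covered_regions:
    print('No coverage data found for this fuzzer/benchmark pair.')
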
Example #5
def exists_in_experiment_filestore(path: pathlib.Path) -> bool:
    """Returns True if |path| exists in the experiment_filestore."""
    return filestore_utils.ls(exp_path.filestore(path),
                              must_exist=False).retcode == 0
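
A short usage sketch; the path is illustrative and the exp_path.filestore mapping from local paths to filestore paths is assumed from context:

import pathlib

archive = pathlib.Path('corpus') / 'corpus-archive-0001.tar.gz'
if not exists_in_experiment_filestore(archive):
    # The trial has not uploaded this snapshot yet; wait or bail out.
    ...
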
Example #6
    def test_integration_runner(self, mocked_error, tmp_path, environ):
        """Test that runner can run libFuzzer and saves snapshots to GCS."""
        # Switch cwd so that fuzzers don't create tons of files in the repo.
        os.chdir(tmp_path)

        # Set env variables that would be set by the Dockerfile.
        file_directory = pathlib.Path(__file__).parent

        root_dir = file_directory.parent
        os.environ['ROOT_DIR'] = str(root_dir)

        seed_corpus_dir = tmp_path / 'seeds'
        os.mkdir(seed_corpus_dir)
        os.environ['SEED_CORPUS_DIR'] = str(seed_corpus_dir)

        output_corpus_dir = tmp_path / 'corpus'
        os.mkdir(output_corpus_dir)
        os.environ['OUTPUT_CORPUS_DIR'] = str(output_corpus_dir)

        fuzzer = 'libfuzzer'
        fuzzer_parent_path = root_dir / 'fuzzers' / fuzzer

        benchmark = 'MultipleConstraintsOnSmallInputTest'
        test_experiment_bucket = os.environ['TEST_EXPERIMENT_FILESTORE']
        experiment = 'integration-test-experiment'
        gcs_directory = posixpath.join(test_experiment_bucket, experiment,
                                       'experiment-folders',
                                       '%s-%s' % (benchmark, fuzzer),
                                       'trial-1')
        filestore_utils.rm(gcs_directory, force=True)
        # Add fuzzer directory to make it easy to run fuzzer.py in local
        # configuration.
        os.environ['PYTHONPATH'] = ':'.join(
            [str(root_dir), str(fuzzer_parent_path)])

        # Set env variables that would be set by the scheduler.
        os.environ['FUZZER'] = fuzzer
        os.environ['BENCHMARK'] = benchmark
        os.environ['EXPERIMENT_FILESTORE'] = test_experiment_bucket
        os.environ['EXPERIMENT'] = experiment

        os.environ['TRIAL_ID'] = str(TRIAL_NUM)

        max_total_time = 10
        os.environ['MAX_TOTAL_TIME'] = str(max_total_time)

        target_binary_path = (file_directory / 'test_data' / 'test_runner' /
                              benchmark)
        with mock.patch('common.fuzzer_utils.get_fuzz_target_binary',
                        return_value=str(target_binary_path)):
            with mock.patch('common.experiment_utils.get_snapshot_seconds',
                            return_value=max_total_time / 10):
                runner.main()

        gcs_corpus_directory = posixpath.join(gcs_directory, 'corpus')
        # ls output lists one entry per line; count the uploaded snapshots.
        # (Counting len() of the raw ProcessResult would not measure this.)
        snapshots = filestore_utils.ls(gcs_corpus_directory).output.splitlines()

        assert len(snapshots) >= 2

        # Check that the archives are deleted after being copied to GCS.
        assert not os.path.exists(
            tmp_path / 'corpus-archives' / 'corpus-archive-0001.tar.gz')

        local_gcs_corpus_dir_copy = tmp_path / 'gcs_corpus_dir'
        os.mkdir(local_gcs_corpus_dir_copy)
        filestore_utils.cp(posixpath.join(gcs_corpus_directory, '*'),
                           str(local_gcs_corpus_dir_copy),
                           recursive=True,
                           parallel=True)
        archive_size = os.path.getsize(local_gcs_corpus_dir_copy /
                                       'corpus-archive-0001.tar.gz')

        assert archive_size > 500

        assert len(os.listdir(output_corpus_dir)) > 5
        mocked_error.assert_not_called()
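
The environ fixture is not shown above; since the test mutates a dozen environment variables, it presumably snapshots and restores os.environ. A minimal sketch of such a fixture, an assumption about the test harness rather than the source:

import os
import pytest


@pytest.fixture
def environ():
    """Hypothetical fixture: save os.environ before the test and restore it
    afterwards, so mutations like os.environ['FUZZER'] = ... do not leak."""
    saved = os.environ.copy()
    yield
    os.environ.clear()
    os.environ.update(saved)

Note that running this integration test also requires TEST_EXPERIMENT_FILESTORE to point at a bucket the test is allowed to write to, since it is read directly from the environment above.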