def test_component_dont_add_label(self):
    """Test that we don't set labels for component builds."""
    self.testcase.job_type = 'job'
    self.testcase.put()

    data_types.Job(
        name='job',
        environment_string=(
            'RELEASE_BUILD_BUCKET_PATH = '
            'https://example.com/blah-v8-component-([0-9]+).zip\n')).put()

    self.testcase.is_impact_set_flag = True
    mock_issue = self._make_mock_issue()
    data_handler.update_issue_impact_labels(self.testcase, mock_issue)
    self.assertCountEqual([], mock_issue.labels.added)
    self.assertCountEqual([], mock_issue.labels.removed)
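
# The examples on this page configure data_types.Job via a newline-separated
# 'KEY = value' block in environment_string. As a minimal, self-contained
# sketch (not the real ClusterFuzz parser), the format can be read back into a
# dict like this:
def parse_environment_string(environment_string):
  """Parse a 'KEY = value' block into a dict (illustrative sketch only)."""
  values = {}
  for line in environment_string.splitlines():
    if '=' not in line:
      continue
    key, _, value = line.partition('=')
    values[key.strip()] = value.strip()
  return values


# For the component job defined above:
env = parse_environment_string(
    'RELEASE_BUILD_BUCKET_PATH = '
    'https://example.com/blah-v8-component-([0-9]+).zip\n')
assert env['RELEASE_BUILD_BUCKET_PATH'].endswith('.zip')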
    def setUp(self):
        """Set up."""
        super(MinimizeTaskTestUntrusted, self).setUp()
        environment.set_value('JOB_NAME', 'libfuzzer_asan_job')

        helpers.patch(self, [
            'datastore.data_handler.get_data_bundle_bucket_name',
        ])

        patcher = mock.patch(
            'bot.fuzzers.libFuzzer.fuzzer.LibFuzzer.fuzzer_directory',
            new_callable=mock.PropertyMock)

        mock_fuzzer_directory = patcher.start()
        self.addCleanup(patcher.stop)

        mock_fuzzer_directory.return_value = os.path.join(
            environment.get_value('ROOT_DIR'), 'src', 'python', 'bot',
            'fuzzers', 'libFuzzer')

        job = data_types.Job(
            name='libfuzzer_asan_job',
            environment_string=(
                'RELEASE_BUILD_BUCKET_PATH = '
                'gs://clusterfuzz-test-data/test_libfuzzer_builds/'
                'test-libfuzzer-build-([0-9]+).zip\n'
                'REVISION_VARS_URL = https://commondatastorage.googleapis.com/'
                'clusterfuzz-test-data/test_libfuzzer_builds/'
                'test-libfuzzer-build-%s.srcmap.json\n'))
        job.put()

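        # Register the libFuzzer fuzz target and map it to the job under test.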
        data_types.FuzzTarget(engine='libFuzzer',
                              binary='test_fuzzer',
                              project='test-project').put()
        data_types.FuzzTargetJob(fuzz_target_name='libFuzzer_test_fuzzer',
                                 engine='libFuzzer',
                                 job='libfuzzer_asan_job').put()

        environment.set_value('USE_MINIJAIL', True)
        data_types.Fuzzer(revision=1,
                          file_size='builtin',
                          source='builtin',
                          name='libFuzzer',
                          max_testcases=4,
                          builtin=True).put()
        self.temp_dir = tempfile.mkdtemp(
            dir=environment.get_value('FUZZ_INPUTS'))
Example #3
  def setUp(self):
    """Set up."""
    super().setUp()
    environment.set_value('JOB_NAME', 'libfuzzer_asan_job')

    job = data_types.Job(
        name='libfuzzer_asan_job',
        environment_string=(
            'RELEASE_BUILD_BUCKET_PATH = '
            'gs://clusterfuzz-test-data/test_libfuzzer_builds/'
            'test-libfuzzer-build-([0-9]+).zip\n'
            'REVISION_VARS_URL = https://commondatastorage.googleapis.com/'
            'clusterfuzz-test-data/test_libfuzzer_builds/'
            'test-libfuzzer-build-%s.srcmap.json\n'))
    job.put()

    self.temp_dir = tempfile.mkdtemp(dir=environment.get_value('FUZZ_INPUTS'))
Example #4
    def test_mapping_substituted(self):
        """Ensure that mappings are substituted properly."""
        job = data_types.Job()
        job.name = 'substitute_fuzzers'
        job.put()

        fuzzer_selection.update_mappings_for_job(job, ['fuzzer_1'])

        mappings = _get_fuzzer_list_for_job(job)
        self.assertIn('fuzzer_1', mappings)
        self.assertNotIn('fuzzer_2', mappings)

        fuzzer_selection.update_mappings_for_job(job, ['fuzzer_2'])

        mappings = _get_fuzzer_list_for_job(job)
        self.assertNotIn('fuzzer_1', mappings)
        self.assertIn('fuzzer_2', mappings)
Example #5
    def test_mapping_added(self):
        """Ensure that we properly add mappings for existing jobs."""
        job = data_types.Job()
        job.name = 'adding_fuzzers'
        job.put()

        fuzzer_selection.update_mappings_for_job(job, ['fuzzer_1'])

        mappings = _get_fuzzer_list_for_job(job)
        self.assertIn('fuzzer_1', mappings)
        self.assertNotIn('fuzzer_2', mappings)

        fuzzer_selection.update_mappings_for_job(job, ['fuzzer_1', 'fuzzer_2'])

        mappings = _get_fuzzer_list_for_job(job)
        self.assertIn('fuzzer_1', mappings)
        self.assertIn('fuzzer_2', mappings)
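
# The mapping tests above use a module-level helper, _get_fuzzer_list_for_job.
# One plausible implementation (an assumption for illustration, not the actual
# helper from the test module) reads the FuzzerJob mappings back:
def _get_fuzzer_list_for_job(job):
  """Return names of fuzzers currently mapped to |job| (sketch only)."""
  query = data_types.FuzzerJob.query(data_types.FuzzerJob.job == job.name)
  return [mapping.fuzzer for mapping in query]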
Example #6
  def setUp(self):
    self.app = webtest.TestApp(
        webapp2.WSGIApplication([('/load-bigquery-stats',
                                  load_bigquery_stats.Handler)]))

    data_types.Fuzzer(name='fuzzer', jobs=['job']).put()
    data_types.Job(name='job').put()

    test_helpers.patch(self, [
        'google_cloud_utils.big_query.get_api_client',
        'handlers.base_handler.Handler.is_cron',
        'handlers.cron.load_bigquery_stats.Handler._utc_now',
    ])

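    # Freeze the handler's clock and replace the BigQuery API client with a mock.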
    self.mock._utc_now.return_value = datetime.datetime(2016, 9, 8)  # pylint: disable=protected-access
    self.mock_bigquery = mock.MagicMock()
    self.mock.get_api_client.return_value = self.mock_bigquery
Example #7
  def setUp(self):
    flaskapp = flask.Flask('testflask')
    flaskapp.add_url_rule(
        '/load-bigquery-stats',
        view_func=load_bigquery_stats.Handler.as_view('/load-bigquery-stats'))
    self.app = webtest.TestApp(flaskapp)

    data_types.Fuzzer(name='fuzzer', jobs=['job']).put()
    data_types.Job(name='job').put()

    test_helpers.patch(self, [
        'google_cloud_utils.big_query.get_api_client',
        'handlers.base_handler.Handler.is_cron',
        'handlers.cron.load_bigquery_stats.Handler._utc_now',
    ])

    self.mock._utc_now.return_value = datetime.datetime(2016, 9, 8)  # pylint: disable=protected-access
    self.mock_bigquery = mock.MagicMock()
    self.mock.get_api_client.return_value = self.mock_bigquery
    def test_component_add_label(self):
        """Test that we set labels for component builds."""
        self.testcase.job_type = 'job'
        self.testcase.impact_stable_version = 'Stable'
        self.testcase.impact_beta_version = 'Beta'
        self.testcase.put()

        data_types.Job(
            name='job',
            environment_string=(
                'RELEASE_BUILD_BUCKET_PATH = '
                'https://example.com/blah-v8-component-([0-9]+).zip\n')).put()

        self.testcase.is_impact_set_flag = True
        mock_issue = self._make_mock_issue()
        issue_filer.update_issue_impact_labels(self.testcase, mock_issue)
        six.assertCountEqual(self, ['Security_Impact-Stable'],
                             mock_issue.labels.added)
        six.assertCountEqual(self, [], mock_issue.labels.removed)
    def setUp(self):
        helpers.patch_environ(self)

        self.today = datetime.datetime.utcnow().date()
        self.today_minus_2 = self.today - datetime.timedelta(days=2)

        job_info = data_types.Job(name='job1',
                                  environment_string='PROJECT_NAME = xyz_name')
        job_info.put()

        cov_info = data_types.CoverageInformation(fuzzer='xyz_name',
                                                  date=self.today_minus_2)
        cov_info.html_report_url = 'https://report_for_xyz/20161019/index.html'
        cov_info.put()

        cov_info = data_types.CoverageInformation(fuzzer='xyz_name',
                                                  date=self.today)
        cov_info.html_report_url = 'https://report_for_xyz/20161021/index.html'
        cov_info.put()
Example #10
    def test_get_components_list(self):
        """Test get_components_list."""
        data_types.Job(name='libfuzzer_asan_libass',
                       environment_string=('PROJECT_NAME = libass\n'
                                           'HELP_URL = help_url\n')).put()
        revisions_dict = {
            u'/src/libass': {
                u'url': u'https://github.com/libass/libass.git',
                u'rev': u'35dc4dd0e14e3afb4a2c7e319a3f4110e20c7cf2',
            },
            u'/src/fribidi': {
                u'url': u'https://github.com/behdad/fribidi.git',
                u'rev': u'881b8d891cc61989ab8811b74d0e721f72bf913b',
            }
        }

        expected_components_list = [u'/src/libass', u'/src/fribidi']
        actual_components_list = revisions.get_components_list(
            revisions_dict, 'libfuzzer_asan_libass')
        self.assertEqual(expected_components_list, actual_components_list)
    def test_engine_fuzzer_job(self):
        """Test variant task with an engine fuzzer job."""
        testcase = data_types.Testcase(
            job_type='libfuzzer_asan_project',
            fuzzer_name='libFuzzer',
            overridden_fuzzer_name='libfuzzer_project_binary_name',
            project_name='project',
            crash_type='crash-type',
            crash_address='0x1337',
            crash_state='A\nB\nC\n',
            crash_revision=1337)
        testcase.set_metadata('fuzzer_binary_name',
                              'binary_name',
                              update_testcase=True)

        job = data_types.Job()
        job.name = 'afl_asan_project'
        job.environment_string = 'PROJECT_NAME = project\n'
        job.put()

        variant_testcase = variant_task._get_variant_testcase_for_job(  # pylint: disable=protected-access
            testcase, 'afl_asan_project')
        self.assertNotEqual(testcase, variant_testcase)
        self.assertEqual(testcase.key.id(), variant_testcase.key.id())
        self.assertEqual('afl', variant_testcase.fuzzer_name)
        self.assertEqual('afl_project_binary_name',
                         variant_testcase.overridden_fuzzer_name)
        self.assertEqual('afl_asan_project', variant_testcase.job_type)

        self.assertEqual('crash-type', variant_testcase.crash_type)
        self.assertEqual('0x1337', variant_testcase.crash_address)
        self.assertEqual('A\nB\nC\n', variant_testcase.crash_state)
        self.assertEqual(1337, variant_testcase.crash_revision)
        self.assertEqual('binary_name',
                         variant_testcase.get_metadata('fuzzer_binary_name'))

        # Test that a put() call does not change original testcase.
        variant_testcase.comments = 'ABC'
        variant_testcase.put()
        testcase = data_handler.get_testcase_by_id(testcase.key.id())
        self.assertEqual('', testcase.comments)
Example #12
    def test_get_components_list_main_repo(self):
        """Test get_components_list with a main_repo set."""
        data_types.Job(
            name='libfuzzer_asan_project',
            environment_string=('PROJECT_NAME = project\n'
                                'MAIN_REPO = https://github.com/org/main.git\n'
                                'HELP_URL = help_url\n')).put()
        revisions_dict = {
            '/src/main': {
                'url': 'https://github.com/org/main.git',
                'rev': '35dc4dd0e14e3afb4a2c7e319a3f4110e20c7cf2',
            },
            '/src/project-fuzzing-corpus': {
                'url': 'https://github.com/org/project-fuzzing-corpus.git',
                'rev': '881b8d891cc61989ab8811b74d0e721f72bf913b',
            }
        }

        expected_components_list = ['/src/main', '/src/project-fuzzing-corpus']
        actual_components_list = revisions.get_components_list(
            revisions_dict, 'libfuzzer_asan_project')
        self.assertEqual(expected_components_list, actual_components_list)
  def test_get_components_list(self):
    """Test get_components_list."""
    data_types.Job(
        name="libfuzzer_asan_libass",
        environment_string=("PROJECT_NAME = libass\n"
                            "HELP_URL = help_url\n"),
    ).put()
    revisions_dict = {
        u"/src/libass": {
            u"url": u"https://github.com/libass/libass.git",
            u"rev": u"35dc4dd0e14e3afb4a2c7e319a3f4110e20c7cf2",
        },
        u"/src/fribidi": {
            u"url": u"https://github.com/behdad/fribidi.git",
            u"rev": u"881b8d891cc61989ab8811b74d0e721f72bf913b",
        },
    }

    expected_components_list = [u"/src/libass", u"/src/fribidi"]
    actual_components_list = revisions.get_components_list(
        revisions_dict, "libfuzzer_asan_libass")
    self.assertEqual(expected_components_list, actual_components_list)
Example #14
    def test_libfuzzer_skip_minimization_initial_crash_state(self):
        """Test libFuzzer minimization skipping with a valid initial crash state."""
        # TODO(ochang): Fix circular import.
        from crash_analysis.crash_result import CrashResult

        data_types.Job(name='libfuzzer_asan_job').put()
        testcase = data_types.Testcase(minimized_keys='',
                                       fuzzed_keys='FUZZED_KEY',
                                       job_type='libfuzzer_asan_job',
                                       security_flag=True)
        testcase.put()

        stacktrace = (
            '==14970==ERROR: AddressSanitizer: heap-buffer-overflow on address '
            '0x61b00001f7d0 at pc 0x00000064801b bp 0x7ffce478dbd0 sp '
            '0x7ffce478dbc8 READ of size 4 at 0x61b00001f7d0 thread T0\n'
            '#0 0x64801a in frame0() src/test.cpp:1819:15\n'
            '#1 0x647ac5 in frame1() src/test.cpp:1954:25\n'
            '#2 0xb1dee7 in frame2() src/test.cpp:160:9\n'
            '#3 0xb1ddd8 in frame3() src/test.cpp:148:34\n')
        self.mock._run_libfuzzer_testcase.return_value = CrashResult(  # pylint: disable=protected-access
            1, 1.0, stacktrace)

        self.mock._run_libfuzzer_tool.return_value = (None, None)  # pylint: disable=protected-access

        minimize_task.do_libfuzzer_minimization(testcase,
                                                '/testcase_file_path')

        testcase = data_handler.get_testcase_by_id(testcase.key.id())
        self.assertEqual('Heap-buffer-overflow', testcase.crash_type)
        self.assertEqual('frame0\nframe1\nframe2\n', testcase.crash_state)
        self.assertEqual('0x61b00001f7d0', testcase.crash_address)
        self.assertEqual(
            '+----------------------------------------Release Build Stacktrace'
            '----------------------------------------+\n%s' % stacktrace,
            testcase.crash_stacktrace)
Example #15
    def setUp(self):
        test_helpers.patch(self, [
            'libs.access.get_access',
            'base.external_users.allowed_jobs_for_user',
            'libs.helpers.get_user_email',
        ])

        data_types.Job(name='libfuzzer_asan_lib',
                       environment_string=('PROJECT_NAME = lib\n'
                                           'CORPUS_PRUNE = True')).put()
        data_types.Job(name='afl_asan_lib',
                       environment_string=('PROJECT_NAME = lib\n')).put()
        data_types.Job(name='libfuzzer_msan_lib',
                       environment_string='PROJECT_NAME = lib').put()
        data_types.Job(name='afl_asan_lib2',
                       environment_string=('PROJECT_NAME = lib2\n')).put()

        data_types.Job(name='libfuzzer_asan_lib2',
                       environment_string=('PROJECT_NAME = lib2\n'
                                           'CORPUS_PRUNE = True')).put()
        data_types.Job(name='libfuzzer_ubsan_lib2',
                       environment_string='PROJECT_NAME = lib2').put()

        data_types.FuzzTarget(engine='afl', binary='fuzzer',
                              project='lib2').put()
        data_types.FuzzTargetJob(fuzz_target_name='afl_lib2_fuzzer',
                                 job='afl_asan_lib2',
                                 last_run=datetime.datetime.utcnow()).put()
        data_types.FuzzTarget(engine='libFuzzer',
                              binary='fuzzer',
                              project='lib2').put()
        data_types.FuzzTargetJob(fuzz_target_name='libFuzzer_lib2_fuzzer',
                                 job='libfuzzer_asan_lib2',
                                 last_run=datetime.datetime.utcnow()).put()
        data_types.FuzzTargetJob(fuzz_target_name='libFuzzer_lib2_fuzzer',
                                 job='libfuzzer_ubsan_lib2',
                                 last_run=datetime.datetime.utcnow()).put()

        self.maxDiff = None  # pylint: disable=invalid-name
  def setUp(self):
    """Set up."""
    super(CorpusPruningTestUntrusted, self).setUp()
    environment.set_value("JOB_NAME", "libfuzzer_asan_job")

    helpers.patch(
        self,
        [
            "bot.fuzzers.engine.get",
            "bot.fuzzers.libFuzzer.fuzzer.LibFuzzer.fuzzer_directory",
            "base.tasks.add_task",
            "datastore.data_handler.get_data_bundle_bucket_name",
        ],
    )

    self.mock.get.return_value = libFuzzer_engine.LibFuzzerEngine()
    self.mock.fuzzer_directory.return_value = os.path.join(
        environment.get_value("ROOT_DIR"),
        "src",
        "python",
        "bot",
        "fuzzers",
        "libFuzzer",
    )

    self.corpus_bucket = os.environ["CORPUS_BUCKET"]
    self.quarantine_bucket = os.environ["QUARANTINE_BUCKET"]
    self.backup_bucket = os.environ["BACKUP_BUCKET"]

    job = data_types.Job(
        name="libfuzzer_asan_job",
        environment_string=("APP_NAME = test_fuzzer\n"
                            "CORPUS_BUCKET = {corpus_bucket}\n"
                            "QUARANTINE_BUCKET = {quarantine_bucket}\n"
                            "BACKUP_BUCKET={backup_bucket}\n"
                            "RELEASE_BUILD_BUCKET_PATH = "
                            "gs://clusterfuzz-test-data/test_libfuzzer_builds/"
                            "test-libfuzzer-build-([0-9]+).zip\n"
                            "REVISION_VARS_URL = gs://clusterfuzz-test-data/"
                            "test_libfuzzer_builds/"
                            "test-libfuzzer-build-%s.srcmap.json\n".format(
                                corpus_bucket=self.corpus_bucket,
                                quarantine_bucket=self.quarantine_bucket,
                                backup_bucket=self.backup_bucket,
                            )),
    )
    job.put()

    job = data_types.Job(
        name="libfuzzer_asan_job2",
        environment_string=("APP_NAME = test2_fuzzer\n"
                            "BACKUP_BUCKET = {backup_bucket}\n"
                            "CORPUS_FUZZER_NAME_OVERRIDE = libfuzzer\n".format(
                                backup_bucket=self.backup_bucket)),
    )
    job.put()

    os.environ["PROJECT_NAME"] = "oss-fuzz"
    data_types.FuzzTarget(
        engine="libFuzzer", project="test", binary="test_fuzzer").put()
    data_types.FuzzTargetJob(
        fuzz_target_name="libFuzzer_test_fuzzer",
        engine="libFuzzer",
        job="libfuzzer_asan_job",
        last_run=datetime.datetime.now(),
    ).put()

    data_types.FuzzTarget(
        engine="libFuzzer", project="test2", binary="fuzzer").put()
    data_types.FuzzTargetJob(
        fuzz_target_name="libFuzzer_test2_fuzzer",
        engine="libFuzzer",
        job="libfuzzer_asan_job2",
        last_run=datetime.datetime.now(),
    ).put()

    environment.set_value("USE_MINIJAIL", True)
    environment.set_value("SHARED_CORPUS_BUCKET", TEST_SHARED_BUCKET)

    # Set up remote corpora.
    self.corpus = corpus_manager.FuzzTargetCorpus("libFuzzer", "test_fuzzer")
    self.corpus.rsync_from_disk(os.path.join(TEST_DIR, "corpus"), delete=True)

    self.quarantine_corpus = corpus_manager.FuzzTargetCorpus(
        "libFuzzer", "test_fuzzer", quarantine=True)
    self.quarantine_corpus.rsync_from_disk(
        os.path.join(TEST_DIR, "quarantine"), delete=True)

    self.mock.get_data_bundle_bucket_name.return_value = TEST_GLOBAL_BUCKET
    data_types.DataBundle(
        name="bundle", is_local=True, sync_to_worker=True).put()

    data_types.Fuzzer(
        revision=1,
        file_size="builtin",
        source="builtin",
        name="libFuzzer",
        max_testcases=4,
        builtin=True,
        data_bundle_name="bundle",
    ).put()

    self.temp_dir = tempfile.mkdtemp()

    # Copy corpus backup in the older date format.
    corpus_backup_date = datetime.datetime.utcnow().date() - datetime.timedelta(
        days=data_types.CORPUS_BACKUP_PUBLIC_LOOKBACK_DAYS)
    corpus_backup_dir = "gs://{bucket}/corpus/libfuzzer/test2_fuzzer/"
    gsutil.GSUtilRunner().run_gsutil([
        "cp",
        (corpus_backup_dir + "backup.zip").format(bucket=TEST2_BACKUP_BUCKET),
        (corpus_backup_dir +
         "%s.zip" % corpus_backup_date).format(bucket=self.backup_bucket),
    ])
Example #17
    def test_unreproducible_get(self):
        """Test valid unreproducible testcase."""
        self.mock.get_last_crash_time.return_value = datetime.datetime(
            2000, 1, 1)

        testcase = data_types.Testcase()
        testcase.job_type = 'windows_asan_chrome'
        testcase.crash_type = 'crash_type1\ncrash_type2'
        testcase.crash_address = 'crash_address'
        testcase.crash_state = 'crash_state'
        testcase.crash_revision = 123
        testcase.regression = None
        testcase.fixed = None
        testcase.fuzzed_keys = None
        testcase.minimized_keys = None
        testcase.timestamp = datetime.datetime(1970, 1, 1)
        testcase.project_name = 'chromium'
        testcase.one_time_crasher_flag = True
        testcase.put()

        job = data_types.Job()
        job.name = 'windows_asan_chrome'
        job.custom_binary_revision = 1234
        job.put()

        self.mock.can_user_access_testcase.return_value = True
        self.mock.get_issue_url.return_value = 'issue_url'
        self.mock.get_stacktrace.return_value = 'crash_stacktrace'
        self.mock.filter_stacktrace.return_value = 'crash_stacktrace'
        self.mock.get_environment.return_value = ({'HELP_URL': 'help_url'})
        self.mock.generate_csrf_token.return_value = 'csrf'

        result = show.get_testcase_detail_by_id(2)
        expected_subset = {
            'id': 2,
            'crash_type': 'crash_type1 crash_type2',
            'crash_address': 'crash_address',
            'crash_state': 'crash_state',
            'crash_state_lines': ['crash_state'],
            'crash_revision': 123,
            'csrf_token': 'csrf',
            'external_user': True,
            'footer': '',
            'fixed': 'NO',
            'issue_url': 'issue_url',
            'metadata': {},
            'minimized_testcase_size': '',
            'needs_refresh': True,
            'original_testcase_size': '',
            'privileged_user': False,
            'regression': 'Pending',
            'security_severity': None,
            'show_impact': False,
            'show_blame': True,
            'auto_delete_timestamp': 947289600.0,
            'auto_close_timestamp': None,
            'memory_tool_display_label': 'Sanitizer',
            'memory_tool_display_value': 'address (ASAN)',
            'last_tested': 'name: 0:revision<br />',
            'is_admin_or_not_oss_fuzz': True,
            'has_issue_tracker': True,
            'reproduction_help_url': 'help_url',
        }

        self.maxDiff = None  # pylint: disable=invalid-name
        self.assertDictContainsSubset(expected_subset, result)
        self.assertEqual(result['testcase'].key.id(), testcase.key.id())

        self.assertDictContainsSubset(
            {'lines': [show.Line(1, 'crash_stacktrace', False)]},
            result['crash_stacktrace'])
        self.assertDictContainsSubset(
            {'lines': [show.Line(1, 'crash_stacktrace', False)]},
            result['second_crash_stacktrace'])
        self.assertDictContainsSubset(
            {'lines': [show.Line(1, 'crash_stacktrace', False)]},
            result['last_tested_crash_stacktrace'])
Example #18
    def setUp(self):
        test_helpers.patch_environ(self)
        # Set up a Fuzzer.
        data_types.Fuzzer(
            revision=1,
            additional_environment_string=
            'FUCHSIA_RESOURCES_URL = gs://fuchsia-on-clusterfuzz-v2/*',
            builtin=True,
            differential=False,
            file_size='builtin',
            jobs=['libfuzzer_asan_test_fuzzer'],
            name='libFuzzer',
            source='builtin',
            max_testcases=4).put()

        # Set up a FuzzerJob.
        data_types.FuzzerJob(fuzzer='libFuzzer',
                             job='libfuzzer_asan_test_fuzzer',
                             platform='FUCHSIA',
                             weight=1.0).put()

        # Set up a FuzzTarget
        data_types.FuzzTarget(binary='libfuzzer_asan_test_fuzzer',
                              engine='libFuzzer',
                              project='test-project').put()

        # Set up a FuzzTargetJob
        data_types.FuzzTargetJob(
            engine='libFuzzer',
            fuzz_target_name='libFuzzer_libfuzzer_asan_test_fuzzer',
            job='libfuzzer_asan_test_fuzzer',
            weight=1.0).put()

        # Set up a Job
        data_types.Job(environment_string=(
            'CUSTOM_BINARY = True\n'
            'FUCHSIA_RESOURCES_URL = gs://fuchsia-on-clusterfuzz-v2/*\n'
            'QUEUE_OVERRIDE=FUCHSIA\n'
            'OS_OVERRIDE=FUCHSIA'),
                       name='libfuzzer_asan_test_fuzzer',
                       platform='FUCHSIA',
                       templates=['libfuzzer', 'engine_asan']).put()

        # Set up a JobTemplate
        data_types.JobTemplate(
            name='libfuzzer',
            environment_string=('APP_NAME = launcher.py\n'
                                'MAX_FUZZ_THREADS = 1\n'
                                'MAX_TESTCASES = 4\n'
                                'FUZZ_TEST_TIMEOUT = 4800\n'
                                'TEST_TIMEOUT = 30\n'
                                'WARMUP_TIMEOUT = 30\n'
                                'BAD_BUILD_CHECK = False\n'
                                'THREAD_ALIVE_CHECK_INTERVAL = 1\n'
                                'REPORT_OOMS_AND_HANGS = True\n'
                                'CORPUS_FUZZER_NAME_OVERRIDE = libFuzzer\n'
                                'ENABLE_GESTURES = False\n'
                                'THREAD_DELAY = 30.0')).put()

        # Set up another JobTemplate
        data_types.JobTemplate(
            name='engine_asan',
            environment_string=
            ('LSAN = True\n'
             'ADDITIONAL_ASAN_OPTIONS = quarantine_size_mb=64:strict_memcmp=1'
             ':symbolize=0:fast_unwind_on_fatal=0'
             ':allocator_release_to_os_interval_ms=500\n')).put()

        environment.set_value('QUEUE_OVERRIDE', 'FUCHSIA')
        environment.set_value('OS_OVERRIDE', 'FUCHSIA')
        environment.set_value('FUCHSIA_RESOURCES_URL',
                              'gs://fuchsia-on-clusterfuzz-v2/*')
        # set_bot_environment gives us access to RESOURCES_DIR
        environment.set_bot_environment()
        # Cannot simply call super(TestLauncherFuchsia).setUp, because the
        # with_cloud_emulators decorator modifies what the parent class would be.
        # Just explicitly call BaseLauncherTest's setUp.
        BaseLauncherTest.setUp(self)
  def setUp(self):
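    # Jobs covering each ISSUE_VIEW_RESTRICTIONS value ('all', 'security',
    # 'none'), plus a job with no restriction set.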
    data_types.Job(
        name='job1',
        environment_string='ISSUE_VIEW_RESTRICTIONS = all',
        platform='linux').put()

    data_types.Job(
        name='job2',
        environment_string='ISSUE_VIEW_RESTRICTIONS = security',
        platform='linux').put()

    data_types.Job(
        name='job3',
        environment_string='ISSUE_VIEW_RESTRICTIONS = none',
        platform='linux').put()

    data_types.Job(
        name='chromeos_job4', environment_string='', platform='linux').put()

    testcase_args = {
        'crash_type': 'Heap-use-after-free',
        'crash_address': '0x1337',
        'crash_state': '1\n2\n3\n',
        'crash_stacktrace': 'stack\n',
        'fuzzer_name': 'fuzzer',
    }

    self.testcase1 = data_types.Testcase(job_type='job1', **testcase_args)
    self.testcase1.put()

    self.testcase1_security = data_types.Testcase(
        security_flag=True, job_type='job1', **testcase_args)
    self.testcase1_security.put()

    self.testcase2 = data_types.Testcase(job_type='job2', **testcase_args)
    self.testcase2.put()

    self.testcase2_security = data_types.Testcase(
        security_flag=True, job_type='job2', **testcase_args)
    self.testcase2_security.put()

    self.testcase3 = data_types.Testcase(job_type='job3', **testcase_args)
    self.testcase3.put()

    self.testcase3_security = data_types.Testcase(
        job_type='job3', security_flag=True, **testcase_args)
    self.testcase3_security.put()

    self.testcase4 = data_types.Testcase(
        job_type='chromeos_job4', **testcase_args)
    self.testcase4.put()

    self.testcase5 = data_types.Testcase(
        job_type='job',
        additional_metadata='{"issue_labels": "label1 , label2,,"}',
        **testcase_args)
    self.testcase5.put()

    self.testcase6 = data_types.Testcase(
        job_type='job', additional_metadata='invalid', **testcase_args)
    self.testcase6.put()

    data_types.ExternalUserPermission(
        email='*****@*****.**',
        entity_name='job2',
        entity_kind=data_types.PermissionEntityKind.JOB,
        is_prefix=False,
        auto_cc=data_types.AutoCCType.ALL).put()

    data_types.ExternalUserPermission(
        email='*****@*****.**',
        entity_name='job3',
        entity_kind=data_types.PermissionEntityKind.JOB,
        is_prefix=False,
        auto_cc=data_types.AutoCCType.SECURITY).put()

    helpers.patch(self, [
        'base.utils.utcnow',
        'datastore.data_handler.get_issue_description',
    ])

    self.mock.get_issue_description.return_value = 'Issue'
    self.mock.utcnow.return_value = datetime.datetime(2016, 1, 1)
  def test_not_found(self):
    """Test target not found."""
    data_types.Job(name='job', environment_string='').put()
    self.assertEqual(
        (None, None),
        upload_testcase.find_fuzz_target('libFuzzer', 'notfound', 'job'))
Example #21
  def setUp(self):
    helpers.patch_environ(self)

    data_types.Job(name='job_linux', platform='LINUX').put()
    data_types.Job(name='job_project', platform='PROJECT_LINUX_LIB').put()
Example #22
def sync_cf_job(project, info, corpus_bucket, quarantine_bucket, logs_bucket,
                backup_bucket, libfuzzer, afl):
  """Sync the config with ClusterFuzz."""
  # Create/update ClusterFuzz jobs.
  for template in get_jobs_for_project(project, info):
    if template.engine == 'libfuzzer':
      fuzzer_entity = libfuzzer
    elif template.engine == 'afl':
      fuzzer_entity = afl
    elif template.engine == 'none':
      # Engine-less jobs are not automatically managed.
      continue
    else:
      raise OssFuzzSetupException('Invalid fuzzing engine.')

    job_name = template.job_name(project)
    job = data_types.Job.query(data_types.Job.name == job_name).get()
    if not job:
      job = data_types.Job()

    if job_name not in fuzzer_entity.jobs and not info.get('disabled', False):
      # Enable new job.
      fuzzer_entity.jobs.append(job_name)

    job.name = job_name
    job.platform = untrusted.platform_name(project, 'linux')
    job.templates = template.cf_job_templates

    revision_vars_url = REVISION_URL.format(
        project=project,
        bucket=_get_build_bucket_for_engine(template.engine),
        sanitizer=template.memory_tool)

    job.environment_string = JOB_TEMPLATE.format(
        build_bucket_path=get_build_bucket_path(project, template.engine,
                                                template.memory_tool),
        logs_bucket=logs_bucket,
        corpus_bucket=corpus_bucket,
        quarantine_bucket=quarantine_bucket,
        backup_bucket=backup_bucket,
        engine=template.engine,
        project=project,
        revision_vars_url=revision_vars_url)

    help_url = info.get('help_url')
    if help_url:
      job.environment_string += 'HELP_URL = %s\n' % help_url

    if template.experimental:
      job.environment_string += 'EXPERIMENTAL = True\n'

    if template.minimize_job_override:
      minimize_job_override = template.minimize_job_override.job_name(project)
      job.environment_string += (
          'MINIMIZE_JOB_OVERRIDE = %s\n' % minimize_job_override)

    view_restrictions = info.get('view_restrictions')
    if view_restrictions:
      if view_restrictions in ALLOWED_VIEW_RESTRICTIONS:
        job.environment_string += (
            'ISSUE_VIEW_RESTRICTIONS = %s\n' % view_restrictions)
      else:
        logs.log_error('Invalid view restriction setting %s for project %s.' %
                       (view_restrictions, project))

    selective_unpack = info.get('selective_unpack')
    if selective_unpack:
      job.environment_string += 'UNPACK_ALL_FUZZ_TARGETS_AND_FILES = False\n'

    job.put()
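
# A minimal invocation sketch for sync_cf_job (all names and bucket values are
# hypothetical placeholders, not taken from the source). The fuzzer entities
# must already exist; persisting any jobs appended to their .jobs lists is
# presumably left to the caller:
libfuzzer = data_types.Fuzzer.query(data_types.Fuzzer.name == 'libFuzzer').get()
afl = data_types.Fuzzer.query(data_types.Fuzzer.name == 'afl').get()
project_info = {'help_url': 'https://example.com/help'}
sync_cf_job('example-project', project_info, 'example-corpus-bucket',
            'example-quarantine-bucket', 'example-logs-bucket',
            'example-backup-bucket', libfuzzer, afl)
libfuzzer.put()
afl.put()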
Example #23
    def post(self):
        """Handle a post request."""
        name = self.request.get('name')
        if not name:
            raise helpers.EarlyExitException('Please give this job a name!',
                                             400)

        if not data_types.Job.VALID_NAME_REGEX.match(name):
            raise helpers.EarlyExitException(
                'Job name can only contain letters, numbers, dashes and underscores.',
                400)

        fuzzers = self.request.get('fuzzers', '').split(',')
        templates = self.request.get('templates', '').splitlines()
        for template in templates:
            if not data_types.JobTemplate.query(
                    data_types.JobTemplate.name == template).get():
                raise helpers.EarlyExitException(
                    'Invalid template name(s) specified.', 400)

        new_platform = self.request.get('platform')
        if not new_platform or new_platform == 'undefined':
            raise helpers.EarlyExitException('No platform provided for job.',
                                             400)

        description = self.request.get('description', '')
        environment_string = self.request.get('environment_string', '')
        previous_custom_binary_revision = 0

        job = data_types.Job.query(data_types.Job.name == name).get()
        recreate_fuzzer_mappings = False
        if not job:
            job = data_types.Job()
        else:
            previous_custom_binary_revision = job.custom_binary_revision
            if previous_custom_binary_revision is None:
                previous_custom_binary_revision = 0
            if new_platform != job.platform:
                # The rare case of modifying a job's platform causes many problems with
                # task selection. If a job is leased from the old queue, the task will
                # be recreated in the correct queue at lease time. Fuzzer mappings must
                # be purged and recreated, since they depend on the job's platform.
                recreate_fuzzer_mappings = True

        job.name = name
        job.platform = new_platform
        job.description = description
        job.environment_string = environment_string
        job.templates = templates

        blob_info = self.get_upload()
        if blob_info:
            job.custom_binary_key = str(blob_info.key())
            job.custom_binary_filename = blob_info.filename
            job.custom_binary_revision = previous_custom_binary_revision + 1

        if job.custom_binary_key and 'CUSTOM_BINARY' not in job.environment_string:
            job.environment_string += '\nCUSTOM_BINARY = True'

        job.put()

        fuzzer_selection.update_mappings_for_job(job, fuzzers)
        if recreate_fuzzer_mappings:
            fuzzer_selection.update_platform_for_job(name, new_platform)

        # pylint: disable=unexpected-keyword-arg
        _ = data_handler.get_all_job_type_names(__memoize_force__=True)

        helpers.log('Job created %s' % name, helpers.MODIFY_OPERATION)
        template_values = {
            'title':
            'Success',
            'message': ('Job %s is successfully updated. '
                        'Redirecting back to jobs page...') % name,
            'redirect_url':
            '/jobs',
        }
        self.render('message.html', template_values)
Example #24
  def _sync_job(self, project, info, corpus_bucket_name, quarantine_bucket_name,
                logs_bucket_name, backup_bucket_name):
    """Sync the config with ClusterFuzz."""
    # Create/update ClusterFuzz jobs.
    for template in get_jobs_for_project(project, info):
      if template.engine == 'none':
        # Engine-less jobs are not automatically managed.
        continue

      fuzzer_entity = self._fuzzer_entities.get(template.engine)
      if not fuzzer_entity:
        raise ProjectSetupError('Invalid fuzzing engine ' + template.engine)

      job_name = template.job_name(project, self._config_suffix)
      job = data_types.Job.query(data_types.Job.name == job_name).get()
      if not job:
        job = data_types.Job()

      if job_name not in fuzzer_entity.jobs and not info.get('disabled', False):
        # Enable new job.
        fuzzer_entity.jobs.append(job_name)

      job.name = job_name
      if self._segregate_projects:
        job.platform = untrusted.platform_name(project, 'linux')
      else:
        # TODO(ochang): Support other platforms?
        job.platform = 'LINUX'

      job.templates = template.cf_job_templates

      job.environment_string = JOB_TEMPLATE.format(
          build_type=self._build_type,
          build_bucket_path=self._get_build_bucket_path(
              project, info, template.engine, template.memory_tool,
              template.architecture),
          engine=template.engine,
          project=project)

      if self._add_revision_mappings:
        revision_vars_url = self._revision_url_template.format(
            project=project,
            bucket=self._get_build_bucket(template.engine,
                                          template.architecture),
            sanitizer=template.memory_tool)

        job.environment_string += (
            'REVISION_VARS_URL = {revision_vars_url}\n'.format(
                revision_vars_url=revision_vars_url))

      if logs_bucket_name:
        job.environment_string += 'FUZZ_LOGS_BUCKET = {logs_bucket}\n'.format(
            logs_bucket=logs_bucket_name)

      if corpus_bucket_name:
        job.environment_string += 'CORPUS_BUCKET = {corpus_bucket}\n'.format(
            corpus_bucket=corpus_bucket_name)

      if quarantine_bucket_name:
        job.environment_string += (
            'QUARANTINE_BUCKET = {quarantine_bucket}\n'.format(
                quarantine_bucket=quarantine_bucket_name))

      if backup_bucket_name:
        job.environment_string += 'BACKUP_BUCKET = {backup_bucket}\n'.format(
            backup_bucket=backup_bucket_name)

      if self._add_info_labels:
        job.environment_string += (
            'AUTOMATIC_LABELS = Proj-{project},Engine-{engine}\n'.format(
                project=project,
                engine=template.engine,
            ))

      help_url = info.get('help_url')
      if help_url:
        job.environment_string += 'HELP_URL = %s\n' % help_url

      if template.experimental:
        job.environment_string += 'EXPERIMENTAL = True\n'

      if template.minimize_job_override:
        minimize_job_override = template.minimize_job_override.job_name(
            project, self._config_suffix)
        job.environment_string += (
            'MINIMIZE_JOB_OVERRIDE = %s\n' % minimize_job_override)

      view_restrictions = info.get('view_restrictions')
      if view_restrictions:
        if view_restrictions in ALLOWED_VIEW_RESTRICTIONS:
          job.environment_string += (
              'ISSUE_VIEW_RESTRICTIONS = %s\n' % view_restrictions)
        else:
          logs.log_error('Invalid view restriction setting %s for project %s.' %
                         (view_restrictions, project))

      selective_unpack = info.get('selective_unpack')
      if selective_unpack:
        job.environment_string += 'UNPACK_ALL_FUZZ_TARGETS_AND_FILES = False\n'

      main_repo = info.get('main_repo')
      if main_repo:
        job.environment_string += f'MAIN_REPO = {main_repo}\n'

      if (template.engine == 'libfuzzer' and
          template.architecture == 'x86_64' and
          'dataflow' in info.get('fuzzing_engines', DEFAULT_ENGINES)):
        # Dataflow binaries are built with dataflow sanitizer, but can be used
        # as an auxiliary build with libFuzzer builds (e.g. with ASan or UBSan).
        dataflow_build_bucket_path = self._get_build_bucket_path(
            project_name=project,
            info=info,
            engine='dataflow',
            memory_tool='dataflow',
            architecture=template.architecture)
        job.environment_string += (
            'DATAFLOW_BUILD_BUCKET_PATH = %s\n' % dataflow_build_bucket_path)

      if self._additional_vars:
        additional_vars = {}
        additional_vars.update(self._additional_vars.get('all', {}))

        engine_vars = self._additional_vars.get(template.engine, {})
        engine_sanitizer_vars = engine_vars.get(template.memory_tool, {})
        additional_vars.update(engine_sanitizer_vars)

        for key, value in sorted(six.iteritems(additional_vars)):
          job.environment_string += ('{} = {}\n'.format(
              key,
              str(value).encode('unicode-escape').decode('utf-8')))

      job.put()
    def _sync_job(
        self,
        project,
        info,
        corpus_bucket_name,
        quarantine_bucket_name,
        logs_bucket_name,
        backup_bucket_name,
    ):
        """Sync the config with ClusterFuzz."""
        # Create/update ClusterFuzz jobs.
        for template in get_jobs_for_project(project, info):
            if template.engine == "none":
                # Engine-less jobs are not automatically managed.
                continue

            fuzzer_entity = self._fuzzer_entities.get(template.engine)
            if not fuzzer_entity:
                raise ProjectSetupError("Invalid fuzzing engine " +
                                        template.engine)

            job_name = template.job_name(project)
            job = data_types.Job.query(data_types.Job.name == job_name).get()
            if not job:
                job = data_types.Job()

            if job_name not in fuzzer_entity.jobs and not info.get(
                    "disabled", False):
                # Enable new job.
                fuzzer_entity.jobs.append(job_name)

            job.name = job_name
            if self._segregate_projects:
                job.platform = untrusted.platform_name(project, "linux")
            else:
                # TODO(ochang): Support other platforms?
                job.platform = "LINUX"

            job.templates = template.cf_job_templates

            job.environment_string = JOB_TEMPLATE.format(
                build_type=self._build_type,
                build_bucket_path=self._get_build_bucket_path(
                    project,
                    info,
                    template.engine,
                    template.memory_tool,
                    template.architecture,
                ),
                engine=template.engine,
                project=project,
            )

            if self._add_revision_mappings:
                revision_vars_url = self._revision_url_template.format(
                    project=project,
                    bucket=self._get_build_bucket(template.engine,
                                                  template.architecture),
                    sanitizer=template.memory_tool,
                )

                job.environment_string += "REVISION_VARS_URL = {revision_vars_url}\n".format(
                    revision_vars_url=revision_vars_url)

            if logs_bucket_name:
                job.environment_string += "FUZZ_LOGS_BUCKET = {logs_bucket}\n".format(
                    logs_bucket=logs_bucket_name)

            if corpus_bucket_name:
                job.environment_string += "CORPUS_BUCKET = {corpus_bucket}\n".format(
                    corpus_bucket=corpus_bucket_name)

            if quarantine_bucket_name:
                job.environment_string += "QUARANTINE_BUCKET = {quarantine_bucket}\n".format(
                    quarantine_bucket=quarantine_bucket_name)

            if backup_bucket_name:
                job.environment_string += "BACKUP_BUCKET = {backup_bucket}\n".format(
                    backup_bucket=backup_bucket_name)

            if self._add_info_labels:
                job.environment_string += "AUTOMATIC_LABELS = Proj-{project},Engine-{engine}\n".format(
                    project=project, engine=template.engine)

            help_url = info.get("help_url")
            if help_url:
                job.environment_string += "HELP_URL = %s\n" % help_url

            if template.experimental:
                job.environment_string += "EXPERIMENTAL = True\n"

            if template.minimize_job_override:
                minimize_job_override = template.minimize_job_override.job_name(
                    project)
                job.environment_string += ("MINIMIZE_JOB_OVERRIDE = %s\n" %
                                           minimize_job_override)

            view_restrictions = info.get("view_restrictions")
            if view_restrictions:
                if view_restrictions in ALLOWED_VIEW_RESTRICTIONS:
                    job.environment_string += (
                        "ISSUE_VIEW_RESTRICTIONS = %s\n" % view_restrictions)
                else:
                    logs.log_error(
                        "Invalid view restriction setting %s for project %s." %
                        (view_restrictions, project))

            selective_unpack = info.get("selective_unpack")
            if selective_unpack:
                job.environment_string += "UNPACK_ALL_FUZZ_TARGETS_AND_FILES = False\n"

            if (template.engine == "libfuzzer"
                    and template.architecture == "x86_64" and "dataflow"
                    in info.get("fuzzing_engines", DEFAULT_ENGINES)):
                # Dataflow binaries are built with dataflow sanitizer, but can be used
                # as an auxiliary build with libFuzzer builds (e.g. with ASan or UBSan).
                dataflow_build_bucket_path = self._get_build_bucket_path(
                    project_name=project,
                    info=info,
                    engine="dataflow",
                    memory_tool="dataflow",
                    architecture=template.architecture,
                )
                job.environment_string += (
                    "DATAFLOW_BUILD_BUCKET_PATH = %s\n" %
                    dataflow_build_bucket_path)

            if self._additional_vars:
                additional_vars = {}
                additional_vars.update(self._additional_vars.get("all", {}))

                engine_vars = self._additional_vars.get(template.engine, {})
                engine_sanitizer_vars = engine_vars.get(
                    template.memory_tool, {})
                additional_vars.update(engine_sanitizer_vars)

                for key, value in sorted(six.iteritems(additional_vars)):
                    job.environment_string += "{} = {}\n".format(
                        key,
                        str(value).encode("unicode-escape").decode("utf-8"))

            job.put()
    def setUp(self):
        test_helpers.patch_environ(self)
        flaskapp = flask.Flask('testflask')
        flaskapp.add_url_rule(
            '/schedule-ml-train-tasks',
            view_func=ml_train.Handler.as_view('/schedule-ml-train-tasks'))
        self.app = webtest.TestApp(flaskapp)

        test_helpers.patch(self, [
            'base.tasks.add_task', 'handlers.base_handler.Handler.is_cron',
            'metrics.logs.log_error'
        ])

        # Create fake jobs.
        data_types.Job(
            name='libfuzzer_asan',
            environment_string='ML_MODELS_TO_USE = rnn_generator').put()
        data_types.Job(name='libfuzzer_msan', environment_string='').put()
        data_types.Job(name='afl_asan', environment_string='').put()

        data_types.Job(
            name='libfuzzer_asan_gradientfuzz',
            environment_string='ML_MODELS_TO_USE = gradientfuzz\n').put()
        data_types.Job(
            name='libfuzzer_asan_all',
            environment_string=(
                'ML_MODELS_TO_USE = gradientfuzz, rnn_generator')).put()
        data_types.Job(
            name='libfuzzer_asan_invalid',
            environment_string='ML_MODELS_TO_USE = invalid_model\n').put()

        # Create fake fuzzers.
        data_types.Fuzzer(
            name='libFuzzer',
            jobs=['libfuzzer_asan', 'libfuzzer_asan_gradientfuzz']).put()
        data_types.Fuzzer(name='afl', jobs=['afl_asan']).put()

        # Create fake child fuzzers.
        data_types.FuzzTarget(engine='libFuzzer',
                              binary='fake_fuzzer',
                              project='test-project').put()
        data_types.FuzzTargetJob(fuzz_target_name='libFuzzer_fake_fuzzer',
                                 job='libfuzzer_asan').put()
        data_types.FuzzTarget(engine='afl',
                              binary='fake_fuzzer',
                              project='test-project').put()
        data_types.FuzzTargetJob(fuzz_target_name='afl_fake_fuzzer',
                                 job='afl_asan').put()

        data_types.FuzzTarget(engine='libFuzzer',
                              binary='fake_gradientfuzzer',
                              project='test-project').put()
        data_types.FuzzTargetJob(
            fuzz_target_name='libFuzzer_fake_gradientfuzzer',
            job='libfuzzer_asan_gradientfuzz').put()

        data_types.FuzzTarget(engine='libFuzzer',
                              binary='fake_all_fuzzer',
                              project='test-project').put()
        data_types.FuzzTargetJob(fuzz_target_name='libFuzzer_fake_all_fuzzer',
                                 job='libfuzzer_asan_all').put()
  def setUp(self):
    test_helpers.patch_environ(self)
    self.maxDiff = None  # pylint: disable=invalid-name
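    # The two fuzzers below share identical stats columns and column
    # descriptions; only their names differ.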

    data_types.Fuzzer(
        name='testFuzzer',
        stats_columns=('sum(t.blah) as blah, custom(j.new_crashes) '
                       'as new_crashes, _EDGE_COV as edge_coverage, '
                       '_FUNC_COV as func_coverage, '
                       '_CORPUS_SIZE as corpus_size, '
                       '_CORPUS_BACKUP as corpus_backup, '
                       '_QUARANTINE_SIZE as quarantine_size, '
                       '_COV_REPORT as coverage_report, '
                       '_FUZZER_RUN_LOGS as fuzzer_logs,'
                       '_PERFORMANCE_REPORT as performance_report'),
        stats_column_descriptions=(
            'blah: "blah description"\n'
            'func_coverage: "func coverage description"\n')).put()

    data_types.Fuzzer(
        name='testFuzzer2',
        stats_columns=('sum(t.blah) as blah, custom(j.new_crashes) '
                       'as new_crashes, _EDGE_COV as edge_coverage, '
                       '_FUNC_COV as func_coverage, '
                       '_CORPUS_SIZE as corpus_size, '
                       '_CORPUS_BACKUP as corpus_backup, '
                       '_QUARANTINE_SIZE as quarantine_size, '
                       '_COV_REPORT as coverage_report, '
                       '_FUZZER_RUN_LOGS as fuzzer_logs,'
                       '_PERFORMANCE_REPORT as performance_report'),
        stats_column_descriptions=(
            'blah: "blah description"\n'
            'func_coverage: "func coverage description"\n')).put()

    data_types.Job(
        name='job', environment_string='FUZZ_LOGS_BUCKET = bucket').put()

    now = datetime.datetime.utcnow()

    data_types.FuzzTarget(
        engine='testFuzzer', project='test-project', binary='1_fuzzer').put()
    data_types.FuzzTargetJob(
        fuzz_target_name='testFuzzer_1_fuzzer', job='job', last_run=now).put()

    data_types.FuzzTarget(
        engine='testFuzzer', project='test-project', binary='2_fuzzer').put()
    data_types.FuzzTargetJob(
        fuzz_target_name='testFuzzer_2_fuzzer', job='job', last_run=now).put()

    data_types.FuzzTarget(
        engine='testFuzzer', project='test-project', binary='3_fuzzer').put()
    data_types.FuzzTargetJob(
        fuzz_target_name='testFuzzer_3_fuzzer', job='job', last_run=now).put()

    data_types.FuzzTarget(
        engine='testFuzzer2', project='test-project', binary='1_fuzzer').put()
    data_types.FuzzTargetJob(
        fuzz_target_name='testFuzzer2_1_fuzzer', job='job', last_run=now).put()

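    # Coverage snapshots: 2_fuzzer has reports dated 2016-10-19 and 2016-10-21;
    # 1_fuzzer has one dated 2016-10-20.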
    cov_info = data_types.CoverageInformation(
        fuzzer='2_fuzzer', date=datetime.date(2016, 10, 19))
    cov_info.edges_covered = 11
    cov_info.edges_total = 30
    cov_info.functions_covered = 10
    cov_info.functions_total = 15
    cov_info.html_report_url = 'https://report_for_2_fuzzer/20161019'
    cov_info.corpus_size_units = 20
    cov_info.corpus_size_bytes = 200
    cov_info.quarantine_size_units = 0
    cov_info.quarantine_size_bytes = 0
    cov_info.corpus_location = 'gs://corpus'
    cov_info.corpus_backup_location = 'gs://corpus-backup/file.zip'
    cov_info.quarantine_location = 'gs://quarantine'
    cov_info.put()

    cov_info = data_types.CoverageInformation(
        fuzzer='2_fuzzer', date=datetime.date(2016, 10, 21))
    cov_info.edges_covered = 15
    cov_info.edges_total = 30
    cov_info.functions_covered = 11
    cov_info.functions_total = 15
    cov_info.html_report_url = 'https://report_for_2_fuzzer/20161021'
    cov_info.corpus_size_units = 40
    cov_info.corpus_size_bytes = 400
    cov_info.quarantine_size_units = 8
    cov_info.quarantine_size_bytes = 80
    cov_info.corpus_location = 'gs://corpus'
    cov_info.corpus_backup_location = 'gs://corpus-backup/file.zip'
    cov_info.quarantine_location = 'gs://quarantine'
    cov_info.put()

    cov_info = data_types.CoverageInformation(
        fuzzer='1_fuzzer', date=datetime.date(2016, 10, 20))
    cov_info.edges_covered = 17
    cov_info.edges_total = 38
    cov_info.functions_covered = 12
    cov_info.functions_total = 19
    cov_info.html_report_url = 'https://report_for_1_fuzzer/20161020'
    cov_info.corpus_size_units = 47
    cov_info.corpus_size_bytes = 480
    cov_info.quarantine_size_units = 3
    cov_info.quarantine_size_bytes = 8
    cov_info.corpus_location = 'gs://corpus'
    cov_info.corpus_backup_location = 'gs://corpus-backup/file.zip'
    cov_info.quarantine_location = 'gs://quarantine'
    cov_info.put()

    self.client = mock.Mock(spec_set=big_query.Client)
    test_helpers.patch(self, [
        'google_cloud_utils.big_query.Client',
    ])
    self.mock.Client.return_value = self.client
    def setUp(self):
        helpers.patch_environ(self)
        project_config_get = local_config.ProjectConfig.get
        helpers.patch(self, [
            'base.utils.default_project_name',
            'config.db_config.get',
            ('project_config_get', 'config.local_config.ProjectConfig.get'),
        ])

        self.job = data_types.Job(
            name='linux_asan_chrome',
            environment_string=('SUMMARY_PREFIX = project\n'
                                'PROJECT_NAME = project\n'
                                'HELP_URL = help_url\n'))
        self.job2 = data_types.Job(
            name='windows_asan_chrome',
            environment_string=('SUMMARY_PREFIX = project\n'
                                'PROJECT_NAME = project\n'
                                'HELP_URL = help_url\n'))
        self.testcase = data_types.Testcase(
            job_type='linux_asan_chrome',
            fuzzer_name='libfuzzer_binary_name',
            crash_type='Crash-type',
            crash_address='0x1337',
            crash_state='A\nB\nC\n')
        self.testcase.set_metadata('fuzzer_binary_name',
                                   'binary_name',
                                   update_testcase=False)

        self.testcase_assert = data_types.Testcase(
            job_type='linux_asan_chrome',
            fuzzer_name='libfuzzer_binary_name',
            crash_type='ASSERT',
            crash_address='0x1337',
            crash_state='foo != bar\nB\nC\n')
        self.testcase_assert.set_metadata('fuzzer_binary_name',
                                          'binary_name',
                                          update_testcase=False)

        self.testcase_null = data_types.Testcase(
            job_type='linux_asan_chrome',
            fuzzer_name='libfuzzer_binary_name',
            crash_type='UNKNOWN',
            crash_address='0x1337',
            crash_state='NULL')

        self.testcase_bad_cast = data_types.Testcase(
            job_type='linux_asan_chrome',
            fuzzer_name='libfuzzer_binary_name',
            crash_type='Bad-cast',
            crash_address='0x1337',
            crash_state=
            ('Bad-cast to blink::LayoutBlock from blink::LayoutTableSection\n'
             'blink::LayoutObject::ContainerForFixedPosition\n'
             'blink::LayoutObject::Container\n'))

        self.testcase_bad_cast_without_crash_function = data_types.Testcase(
            job_type='linux_asan_chrome',
            fuzzer_name='libfuzzer_binary_name',
            crash_type='Bad-cast',
            crash_address='0x1337',
            crash_state=
            ('Bad-cast to blink::LayoutBlock from blink::LayoutTableSection\n'
             ))

        self.local_data_bundle = data_types.DataBundle(
            name='local_data_bundle')
        self.cloud_data_bundle = data_types.DataBundle(
            name='cloud_data_bundle')

        self.fuzzer1 = data_types.Fuzzer(name='fuzzer1',
                                         data_bundle_name=None,
                                         jobs=['linux_asan_chrome'])
        self.fuzzer2 = data_types.Fuzzer(name='fuzzer2',
                                         data_bundle_name='local_data_bundle',
                                         jobs=['linux_asan_chrome'])
        self.fuzzer3 = data_types.Fuzzer(name='fuzzer3',
                                         data_bundle_name='cloud_data_bundle',
                                         jobs=['linux_asan_chrome'])

        entities_to_put = [
            self.testcase, self.testcase_assert, self.testcase_null,
            self.testcase_bad_cast,
            self.testcase_bad_cast_without_crash_function, self.job, self.job2,
            self.local_data_bundle, self.cloud_data_bundle, self.fuzzer1,
            self.fuzzer2, self.fuzzer3
        ]
        for entity in entities_to_put:
            entity.put()

        environment.set_value('FUZZ_DATA',
                              '/tmp/inputs/fuzzer-common-data-bundles')
        environment.set_value('FUZZERS_DIR', '/tmp/inputs/fuzzers')
        self.mock.default_project_name.return_value = 'project'
        self.mock.project_config_get.side_effect = project_config_get
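
    # A minimal hedged sketch of a test built on this fixture; it relies only
    # on names defined above and assumes Testcase.get_metadata mirrors the
    # set_metadata calls in setUp.
    def test_fuzzer_binary_name_metadata(self):
        """Sketch: metadata written in setUp can be read back."""
        self.assertEqual('binary_name',
                         self.testcase.get_metadata('fuzzer_binary_name'))
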
    def setUp(self):
        """Set up."""
        super(CorpusPruningTestUntrusted, self).setUp()
        environment.set_value('JOB_NAME', 'libfuzzer_asan_job')

        helpers.patch(self, [
            'bot.fuzzers.engine.get', 'bot.tasks.setup.get_fuzzer_directory',
            'base.tasks.add_task',
            # Patched here so self.mock.get_data_bundle_bucket_name (set
            # below) exists.
            'datastore.data_handler.get_data_bundle_bucket_name',
            'bot.tasks.corpus_pruning_task._record_cross_pollination_stats'
        ])

        self.mock.get.return_value = libFuzzer_engine.LibFuzzerEngine()
        self.mock.get_fuzzer_directory.return_value = os.path.join(
            environment.get_value('ROOT_DIR'), 'src', 'python', 'bot',
            'fuzzers', 'libFuzzer')
        self.corpus_bucket = os.environ['CORPUS_BUCKET']
        self.quarantine_bucket = os.environ['QUARANTINE_BUCKET']
        self.backup_bucket = os.environ['BACKUP_BUCKET']

        job = data_types.Job(
            name='libfuzzer_asan_job',
            environment_string=(
                'APP_NAME = test_fuzzer\n'
                'CORPUS_BUCKET = {corpus_bucket}\n'
                'QUARANTINE_BUCKET = {quarantine_bucket}\n'
                'BACKUP_BUCKET={backup_bucket}\n'
                'RELEASE_BUILD_BUCKET_PATH = '
                'gs://clusterfuzz-test-data/test_libfuzzer_builds/'
                'test-libfuzzer-build-([0-9]+).zip\n'
                'REVISION_VARS_URL = gs://clusterfuzz-test-data/'
                'test_libfuzzer_builds/'
                'test-libfuzzer-build-%s.srcmap.json\n'.format(
                    corpus_bucket=self.corpus_bucket,
                    quarantine_bucket=self.quarantine_bucket,
                    backup_bucket=self.backup_bucket)))
        job.put()
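        # Note: the adjacent string literals above are concatenated into one
        # string before .format() runs, so the {corpus_bucket},
        # {quarantine_bucket} and {backup_bucket} placeholders in the earlier
        # pieces are filled in as well.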

        job = data_types.Job(
            name='libfuzzer_asan_job2',
            environment_string=(
                'APP_NAME = test2_fuzzer\n'
                'BACKUP_BUCKET = {backup_bucket}\n'
                'CORPUS_FUZZER_NAME_OVERRIDE = libfuzzer\n'.format(
                    backup_bucket=self.backup_bucket)))
        job.put()

        os.environ['PROJECT_NAME'] = 'oss-fuzz'
        data_types.FuzzTarget(engine='libFuzzer',
                              project='test',
                              binary='test_fuzzer').put()
        data_types.FuzzTargetJob(fuzz_target_name='libFuzzer_test_fuzzer',
                                 engine='libFuzzer',
                                 job='libfuzzer_asan_job',
                                 last_run=datetime.datetime.now()).put()

        data_types.FuzzTarget(engine='libFuzzer',
                              project='test2',
                              binary='fuzzer').put()
        data_types.FuzzTargetJob(fuzz_target_name='libFuzzer_test2_fuzzer',
                                 engine='libFuzzer',
                                 job='libfuzzer_asan_job2',
                                 last_run=datetime.datetime.now()).put()

        environment.set_value('USE_MINIJAIL', True)
        environment.set_value('SHARED_CORPUS_BUCKET', TEST_SHARED_BUCKET)

        # Set up remote corpora.
        self.corpus = corpus_manager.FuzzTargetCorpus('libFuzzer',
                                                      'test_fuzzer')
        self.corpus.rsync_from_disk(os.path.join(TEST_DIR, 'corpus'),
                                    delete=True)

        self.quarantine_corpus = corpus_manager.FuzzTargetCorpus(
            'libFuzzer', 'test_fuzzer', quarantine=True)
        self.quarantine_corpus.rsync_from_disk(
            os.path.join(TEST_DIR, 'quarantine'), delete=True)

        self.mock.get_data_bundle_bucket_name.return_value = TEST_GLOBAL_BUCKET
        data_types.DataBundle(name='bundle',
                              is_local=True,
                              sync_to_worker=True).put()

        data_types.Fuzzer(revision=1,
                          file_size='builtin',
                          source='builtin',
                          name='libFuzzer',
                          max_testcases=4,
                          builtin=True,
                          data_bundle_name='bundle').put()

        self.temp_dir = tempfile.mkdtemp()

        # Copy corpus backup in the older date format.
        corpus_backup_date = (
            datetime.datetime.utcnow().date() - datetime.timedelta(
                days=data_types.CORPUS_BACKUP_PUBLIC_LOOKBACK_DAYS))
        corpus_backup_dir = ('gs://{bucket}/corpus/libfuzzer/test2_fuzzer/')
        gsutil.GSUtilRunner().run_gsutil([
            'cp', (corpus_backup_dir +
                   'backup.zip').format(bucket=TEST2_BACKUP_BUCKET),
            (corpus_backup_dir +
             '%s.zip' % corpus_backup_date).format(bucket=self.backup_bucket)
        ])
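
    # A hedged sketch of a matching tearDown (assumed; not shown in the
    # snippet). The local import keeps the sketch self-contained.
    def tearDown(self):
        """Remove the temporary directory created in setUp."""
        import shutil
        super(CorpusPruningTestUntrusted, self).tearDown()
        shutil.rmtree(self.temp_dir, ignore_errors=True)
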
  def test_with_main_project(self):
    """Test find_fuzz_target with a target in the main project."""
    data_types.Job(name='job', environment_string='').put()
    self.assertEqual(
        ('libFuzzer_binary', 'binary'),
        upload_testcase.find_fuzz_target('libFuzzer', 'binary', 'job'))