Example #1
def generate_csrf_token(length=64, valid_seconds=3600, html=False):
    """Generate a CSRF token."""
    now = utils.utcnow()
    valid_token = None

    # Clean up expired tokens to prevent junk from building up in the datastore.
    tokens = data_types.CSRFToken.query(
        data_types.CSRFToken.user_email == helpers.get_user_email())
    tokens_to_delete = []
    for token in tokens:
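        # A token that has not expired yet is kept and reused; only expired tokens are queued for deletion.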
        if token.expiration_time > now:
            valid_token = token
            continue
        tokens_to_delete.append(token.key)
    ndb.delete_multi(tokens_to_delete)

    # Generate a new token.
    if not valid_token:
        valid_token = data_types.CSRFToken()
        valid_token.value = base64.b64encode(os.urandom(length))
        valid_token.expiration_time = (
            now + datetime.timedelta(seconds=valid_seconds))
        valid_token.user_email = helpers.get_user_email()
        valid_token.put()

    value = valid_token.value
    if html:
        return '<input type="hidden" name="csrf_token" value="%s" />' % value
    return value
Example #2
def update_fuzzer_jobs(fuzzer_entities, project_names):
    """Update fuzzer job mappings."""
    to_delete = []

    for job in data_types.Job.query():
        if not job.environment_string:
            continue

        job_environment = job.get_environment()
        if not job_environment.get('MANAGED', False):
            continue

        job_project = job_environment['PROJECT_NAME']
        if job_project in project_names:
            continue

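        # The job's project is no longer in the active list, so delete the job and detach it from every fuzzer that references it.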
        to_delete.append(job.key)
        for fuzzer_entity in fuzzer_entities:
            try:
                fuzzer_entity.jobs.remove(job.name)
            except ValueError:
                pass

    for fuzzer_entity in fuzzer_entities:
        fuzzer_entity.put()
        fuzzer_selection.update_mappings_for_fuzzer(fuzzer_entity)

    if to_delete:
        ndb.delete_multi(to_delete)
Example #3
def cleanup_reports_metadata():
    """Delete ReportMetadata for uploaded reports."""
    uploaded_reports = ndb_utils.get_all_from_query(
        data_types.ReportMetadata.query(
            ndb_utils.is_true(data_types.ReportMetadata.is_uploaded)),
        keys_only=True)
    ndb.delete_multi(uploaded_reports)
Example #4
def sync_user_permissions(project, info):
  """Sync permissions of project based on project.yaml."""
  ccs = ccs_from_info(info)

  for template in get_jobs_for_project(project, info):
    job_name = template.job_name(project)

    # Delete removed CCs.
    existing_ccs = data_types.ExternalUserPermission.query(
        data_types.ExternalUserPermission.entity_kind ==
        data_types.PermissionEntityKind.JOB,
        data_types.ExternalUserPermission.entity_name == job_name)
    ndb.delete_multi([
        permission.key
        for permission in existing_ccs
        if permission.email not in ccs
    ])

    for cc in ccs:
      query = data_types.ExternalUserPermission.query(
          data_types.ExternalUserPermission.email == cc,
          data_types.ExternalUserPermission.entity_kind ==
          data_types.PermissionEntityKind.JOB,
          data_types.ExternalUserPermission.entity_name == job_name)

      existing_permission = query.get()
      if existing_permission:
        continue

      data_types.ExternalUserPermission(
          email=cc,
          entity_kind=data_types.PermissionEntityKind.JOB,
          entity_name=job_name,
          is_prefix=False,
          auto_cc=data_types.AutoCCType.ALL).put()
Example #5
def update_mappings_for_fuzzer(fuzzer, mappings=None):
    """Clear existing mappings for a fuzzer, and replace them."""
    if mappings is None:
        mappings = fuzzer.jobs

    query = data_types.FuzzerJob.query()
    query = query.filter(data_types.FuzzerJob.fuzzer == fuzzer.name)
    entities = ndb_utils.get_all_from_query(query)
    old_mappings = {}
    for entity in entities:
        old_mappings[(entity.job, entity.platform)] = entity

    new_mappings = []
    for job_name in mappings:
        job = data_types.Job.query(data_types.Job.name == job_name).get()
        if not job:
            logs.log_error('An unknown job %s was selected for fuzzer %s.' %
                           (job_name, fuzzer.name))
            continue

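        # Pop mappings that are still valid out of old_mappings; whatever remains afterwards is stale and deleted below.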
        mapping = old_mappings.pop((job_name, job.platform), None)
        if mapping:
            continue

        mapping = data_types.FuzzerJob()
        mapping.fuzzer = fuzzer.name
        mapping.job = job_name
        mapping.platform = job.platform
        new_mappings.append(mapping)

    ndb.put_multi(new_mappings)
    ndb.delete_multi([m.key for m in list(old_mappings.values())])
Example #6
def _query_and_upload_strategy_probabilities(engine):
  """Uploads queried data into datastore.

  Calls query functions and uploads query results
  to datastore to use as new probabilities. Probabilities
  are based on new_edges feature."""
  strategy_data = []
  data = _query_multi_armed_bandit_probabilities(engine)
  logs.log('Queried distribution for {}.'.format(engine.name))

  # TODO(mukundv): Update once we choose a temperature parameter for final
  # implementation.
  for row in data:
    curr_strategy = data_types.FuzzStrategyProbability()
    curr_strategy.strategy_name = str(row['strategy'])
    curr_strategy.probability = float(row['bandit_weight'])
    curr_strategy.engine = engine.name
    strategy_data.append(curr_strategy)

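  # Replace all existing probability entries for this engine with the freshly queried ones.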
  query = data_types.FuzzStrategyProbability.query(
      data_types.FuzzStrategyProbability.engine == engine.name)
  ndb.delete_multi(
      [entity.key for entity in ndb_utils.get_all_from_query(query)])
  ndb.put_multi(strategy_data)
  logs.log('Uploaded queried distribution to ndb for {}'.format(engine.name))
  _store_probabilities_in_bigquery(engine, data)
  logs.log('Uploaded queried distribution to BigQuery for {}'.format(
      engine.name))
Example #7
def _query_and_upload_strategy_probabilities():
    """Uploads queried data into datastore.

  Calls query functions and uploads query results
  to datastore to use as new probabilities. Probabilities
  are based on new_edges feature."""
    strategy_data = []
    data = _query_multi_armed_bandit_probabilities()

    # TODO(mukundv): Update once we choose a temperature parameter for final
    # implementation.
    for row in data:
        curr_strategy = data_types.FuzzStrategyProbability()
        curr_strategy.strategy_name = str(row['strategy'])
        curr_strategy.probability_high_temperature = float(
            row['bandit_weight_high_temperature'])
        curr_strategy.probability_low_temperature = float(
            row['bandit_weight_low_temperature'])
        curr_strategy.probability_medium_temperature = float(
            row['bandit_weight_medium_temperature'])
        strategy_data.append(curr_strategy)

    ndb.delete_multi([
        entity.key for entity in ndb_utils.get_all_from_model(
            data_types.FuzzStrategyProbability)
    ])
    ndb.put_multi(strategy_data)
    _store_probabilities_in_bigquery(data)
Example #8
def execute_task(*_):
    """Execute the report uploads."""
    logs.log('Uploading pending reports.')

    # Get metadata for reports requiring upload.
    reports_metadata = ndb_utils.get_all_from_query(
        data_types.ReportMetadata.query(
            ndb_utils.is_false(data_types.ReportMetadata.is_uploaded)))
    reports_metadata = list(reports_metadata)
    if not reports_metadata:
        logs.log('No reports that need upload found.')
        return

    environment.set_value('UPLOAD_MODE', 'prod')

    # Otherwise, upload corresponding reports.
    logs.log('Uploading reports for testcases: %s' %
             str([report.testcase_id for report in reports_metadata]))

    report_metadata_to_delete = []
    for report_metadata in reports_metadata:
        # Convert metadata back into actual report.
        crash_info = crash_uploader.crash_report_info_from_metadata(
            report_metadata)
        testcase_id = report_metadata.testcase_id

        try:
            _ = data_handler.get_testcase_by_id(testcase_id)
        except errors.InvalidTestcaseError:
            logs.log_warn('Could not find testcase %s.' % testcase_id)
            report_metadata_to_delete.append(report_metadata.key)
            continue

        # Upload the report and update the corresponding testcase info.
        logs.log('Processing testcase %s for crash upload.' % testcase_id)
        crash_report_id = crash_info.upload()
        if crash_report_id is None:
            logs.log_error(
                'Crash upload for testcase %s failed, retry later.' %
                testcase_id)
            continue

        # Update the report metadata to indicate successful upload.
        report_metadata.crash_report_id = crash_report_id
        report_metadata.is_uploaded = True
        report_metadata.put()

        logs.log('Uploaded testcase %s to crash, got back report id %s.' %
                 (testcase_id, crash_report_id))

    # Delete report metadata entries where testcase does not exist anymore or
    # upload is not supported.
    if report_metadata_to_delete:
        ndb.delete_multi(report_metadata_to_delete)

    # Log done with uploads.
    # Deletion happens in batches in cleanup_task, so that in case of error there
    # is some buffer for looking at stored ReportMetadata in the meantime.
    logs.log('Finished uploading crash reports.')
Example #9
    def cleanup_old_assignments(self, host_names):
        """Remove old OSS-Fuzz host worker assignment entries."""
        to_delete = []
        for assignment in data_types.HostWorkerAssignment.query():
            if assignment.host_name not in host_names:
                to_delete.append(assignment.key)

        ndb.delete_multi(to_delete)
Example #10
def cleanup_unused_heartbeats():
    """Clean up unused heartbeat entities."""
    cutoff_time = utils.utcnow() - datetime.timedelta(
        days=UNUSED_HEARTBEAT_THRESHOLD)
    unused_heartbeats = ndb_utils.get_all_from_query(
        data_types.Heartbeat.query(
            data_types.Heartbeat.last_beat_time < cutoff_time),
        keys_only=True)

    ndb.delete_multi(unused_heartbeats)
Example #11
def cleanup_old_projects_settings(project_names):
  """Delete old projects that are no longer used or disabled."""
  to_delete = []

  for project in data_types.OssFuzzProject.query():
    if project.name not in project_names:
      to_delete.append(project.key)

  if to_delete:
    ndb.delete_multi(to_delete)
Example #12
    def cleanup_old_projects(self, existing_project_names):
        """Cleanup old projects."""
        to_delete = []

        for project_info in list(data_types.OssFuzzProjectInfo.query()):
            if project_info.name in existing_project_names:
                continue

            logging.info('Deleting %s', project_info.name)

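            # Delete the GCE resources associated with each of the project's clusters before removing the project entry.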
            for cluster_info in project_info.clusters:
                self.delete_gce_resources(project_info, cluster_info)

            to_delete.append(project_info.key)

        ndb.delete_multi(to_delete)
Example #13
def cleanup_unused_fuzz_targets_and_jobs():
    """Clean up unused FuzzTarget and FuzzTargetJob entities."""
    last_run_cutoff = utils.utcnow() - datetime.timedelta(
        days=FUZZ_TARGET_UNUSED_THRESHOLD)

    unused_target_jobs = data_types.FuzzTargetJob.query(
        data_types.FuzzTargetJob.last_run < last_run_cutoff)
    valid_target_jobs = data_types.FuzzTargetJob.query(
        data_types.FuzzTargetJob.last_run >= last_run_cutoff)

    to_delete = [t.key for t in unused_target_jobs]

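    # Also delete FuzzTarget entities that no longer have any recently run job mapping.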
    valid_fuzz_targets = set(t.fuzz_target_name for t in valid_target_jobs)
    for fuzz_target in ndb_utils.get_all_from_model(data_types.FuzzTarget):
        if fuzz_target.fully_qualified_name() not in valid_fuzz_targets:
            to_delete.append(fuzz_target.key)

    ndb.delete_multi(to_delete)
Example #14
def cleanup_old_jobs(project_names):
  """Delete old jobs that are no longer used."""
  to_delete = []

  for job in data_types.Job.query():
    if not job.environment_string:
      continue

    job_environment = job.get_environment()
    if not job_environment.get('MANAGED', False):
      continue

    job_project = job_environment['PROJECT_NAME']
    if job_project not in project_names:
      to_delete.append(job.key)

  if to_delete:
    ndb.delete_multi(to_delete)
Example #15
def cleanup_global_blacklist():
    """Cleans out closed and deleted testcases from the global blacklist."""
    blacklists_to_delete = []
    global_blacklists = data_types.Blacklist.query(
        data_types.Blacklist.tool_name == LSAN_TOOL_NAME)
    for blacklist in global_blacklists:
        testcase_id = blacklist.testcase_id

        try:
            testcase = data_handler.get_testcase_by_id(testcase_id)
        except errors.InvalidTestcaseError:
            testcase = None

        # Delete entry if testcase is closed, deleted, or unreproducible.
        if not testcase or not testcase.open or testcase.one_time_crasher_flag:
            blacklists_to_delete.append(blacklist.key)

    ndb.delete_multi(blacklists_to_delete)
Example #16
def _query_and_upload_strategy_probabilities():
    """Uploads queried data into datastore.

  Calls query functions and uploads query results
  to datastore to use as new probabilities. Probabilities
  are based on new_edges feature."""
    strategy_data = []
    data = _query_multi_armed_bandit_probabilities()

    for row in data:
        curr_strategy = data_types.FuzzStrategyProbability()
        curr_strategy.strategy_name = str(row['strategy'])
        curr_strategy.probability = float(row['bandit_weight'])
        strategy_data.append(curr_strategy)

    ndb.delete_multi([
        entity.key for entity in ndb_utils.get_all_from_model(
            data_types.FuzzStrategyProbability)
    ])
    ndb.put_multi(strategy_data)
    _store_probabilities_in_bigquery(data)
Example #17
def update_admins(new_admins):
    """Update list of admins."""
    existing_admins = ndb_utils.get_all_from_model(data_types.Admin)

    to_remove = []
    existing_admin_emails = set()
    for admin in existing_admins:
        if admin.email not in new_admins:
            logs.log('Removing admin ' + admin.email)
            to_remove.append(admin.key)

        existing_admin_emails.add(admin.email)

    ndb.delete_multi(to_remove)

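    # Add entries for any new admins that are not already present.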
    to_add = []
    for admin in new_admins:
        if admin not in existing_admin_emails:
            to_add.append(data_types.Admin(id=admin, email=admin))
            logs.log('Adding admin ' + admin)

    ndb.put_multi(to_add)
Example #18
def redo_testcase(testcase, tasks, user_email):
    """Redo specific tasks for a testcase."""
    for task in tasks:
        if task not in VALID_REDO_TASKS:
            raise InvalidRedoTask(task)

    minimize = 'minimize' in tasks
    regression = 'regression' in tasks
    progression = 'progression' in tasks
    impact = 'impact' in tasks
    blame = 'blame' in tasks

    task_list = []
    testcase_id = testcase.key.id()

    # Metadata keys to clear based on which redo tasks were selected.
    metadata_keys_to_clear = ['potentially_flaky']

    if minimize:
        task_list.append('minimize')
        testcase.minimized_keys = ''
        metadata_keys_to_clear += [
            'current_minimization_phase_attempts', 'minimization_phase'
        ]

        # If this testcase was archived during minimization, update the state.
        testcase.archive_state &= ~data_types.ArchiveStatus.MINIMIZED

    if regression:
        task_list.append('regression')
        testcase.regression = ''
        metadata_keys_to_clear += [
            'last_regression_min', 'last_regression_max'
        ]

    if progression:
        task_list.append('progression')
        testcase.fixed = ''
        testcase.open = True
        testcase.last_tested_crash_stacktrace = None
        testcase.triaged = False
        testcase.set_metadata('progression_pending',
                              True,
                              update_testcase=False)
        metadata_keys_to_clear += [
            'last_progression_min', 'last_progression_max',
            'last_tested_revision'
        ]

    if impact:
        task_list.append('impact')
        testcase.is_impact_set_flag = False

    if blame:
        task_list.append('blame')
        testcase.set_metadata('blame_pending', True, update_testcase=False)
        testcase.set_metadata('predator_result', None, update_testcase=False)

    for key in metadata_keys_to_clear:
        testcase.delete_metadata(key, update_testcase=False)

    testcase.comments += '[%s] %s: Redo task(s): %s\n' % (
        utils.current_date_time(), user_email, ', '.join(sorted(task_list)))
    testcase.one_time_crasher_flag = False
    testcase.put()

    # Allow new notifications to be sent for this testcase.
    notifications = ndb_utils.get_all_from_query(data_types.Notification.query(
        data_types.Notification.testcase_id == testcase.key.id()),
                                                 keys_only=True)
    ndb.delete_multi(notifications)

    # If we are re-doing minimization, other tasks will be done automatically
    # after minimization completes. So, don't add those tasks.
    if minimize:
        add_task('minimize', testcase_id, testcase.job_type,
                 queue_for_testcase(testcase))
    else:
        if regression:
            add_task('regression', testcase_id, testcase.job_type,
                     queue_for_testcase(testcase))

        if progression:
            add_task('progression', testcase_id, testcase.job_type,
                     queue_for_testcase(testcase))

        if impact:
            add_task('impact', testcase_id, testcase.job_type,
                     queue_for_testcase(testcase))

        if blame:
            add_task('blame', testcase_id, testcase.job_type,
                     queue_for_testcase(testcase))