Example #1
    def get(self):
        """Handle a GET request."""
        jobs = ndb_utils.get_all_from_model(data_types.Job)
        for job in jobs:
            job_environment = job.get_environment()
            if utils.string_is_true(job_environment.get('EXPERIMENTAL')):
                # Don't use corpus backups from experimental jobs. Skip.
                continue

            if not utils.string_is_true(job_environment.get('CORPUS_PRUNE')):
                # There won't be any corpus backups for these jobs. Skip.
                continue

            corpus_backup_bucket_name = job_environment.get('BACKUP_BUCKET')
            if not corpus_backup_bucket_name:
                # No backup bucket found. Skip.
                continue

            corpus_fuzzer_name_override = job_environment.get(
                'CORPUS_FUZZER_NAME_OVERRIDE')

            target_jobs = list(
                fuzz_target_utils.get_fuzz_target_jobs(job=job.name))
            fuzz_targets = fuzz_target_utils.get_fuzz_targets_for_target_jobs(
                target_jobs)

            for target in fuzz_targets:
                _make_corpus_backup_public(target, corpus_fuzzer_name_override,
                                           corpus_backup_bucket_name)
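Nearly every example in this section gates on utils.string_is_true, because job environment values are stored as strings rather than booleans. A minimal sketch of the presumed semantics, for readers unfamiliar with the helper (the real ClusterFuzz implementation may differ):

# Hedged sketch of utils.string_is_true; illustrative only. Environment
# values such as 'True', 'false' or '0' arrive as strings, so they must be
# normalized before being used as boolean flags.
def string_is_true(value):
    return bool(value) and value.lower() not in ('false', '0')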
Example #2
def get_jobs_and_platforms_for_top_crashes():
    """Return list of jobs and platforms to use for picking top crashes."""
    jobs = set()
    platforms = set()

    all_jobs = ndb_utils.get_all_from_model(data_types.Job)
    for job in all_jobs:
        job_environment = job.get_environment()

        # Skip experimental jobs.
        if utils.string_is_true(job_environment.get('EXPERIMENTAL')):
            continue

        # Skip custom binary jobs.
        if (utils.string_is_true(job_environment.get('CUSTOM_BINARY'))
                or job_environment.get('SYSTEM_BINARY_DIR')):
            continue

        # Skip if explicitly excluded using flag.
        if utils.string_is_true(
                job_environment.get('EXCLUDE_FROM_TOP_CRASHES')):
            continue

        jobs.add(job.name)
        platforms.add(job_platform_to_real_platform(job.platform))

    return jobs, platforms
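Both this example and Example #5 call job_platform_to_real_platform to map a job-specific platform string back to a base platform. A hedged sketch of that helper, assuming a data_types.PLATFORMS list of base platform names:

# Presumed shape of the helper (not shown in the examples): a job platform
# string such as 'LINUX_UNTRUSTED' contains the base platform name 'LINUX'.
def job_platform_to_real_platform(job_platform):
    for platform in data_types.PLATFORMS:
        if platform in job_platform:
            return platform
    raise ValueError('Unknown platform: ' + job_platform)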
Example #3
    def get(self):
        """Handle a GET request."""
        for job in data_types.Job.query():

            if not utils.string_is_true(
                    job.get_environment().get('USE_CORPUS_FOR_ML')):
                continue

            task_list = []
            if utils.string_is_true(
                    job.get_environment().get('USE_GRADIENTFUZZ')):
                task_list.append('train_gradientfuzz')
            if utils.string_is_true(
                    job.get_environment().get('USE_RNN_GENERATOR')):
                task_list.append('train_rnn_generator')

            if not task_list:
                continue

            target_jobs = list(
                fuzz_target_utils.get_fuzz_target_jobs(job=job.name))
            fuzz_targets = fuzz_target_utils.get_fuzz_targets_for_target_jobs(
                target_jobs)

            for task_name in task_list:
                for target in fuzz_targets:
                    tasks.add_task(task_name,
                                   target.project_qualified_name(),
                                   job.name,
                                   queue=tasks.ML_JOBS_TASKQUEUE)
Example #4
    def get(self):
        """Handle a GET request."""
        jobs = ndb_utils.get_all_from_model(data_types.Job)
        default_backup_bucket = utils.default_backup_bucket()
        for job in jobs:
            job_environment = job.get_environment()
            if utils.string_is_true(job_environment.get("EXPERIMENTAL")):
                # Don't use corpus backups from experimental jobs. Skip.
                continue

            if not utils.string_is_true(job_environment.get("CORPUS_PRUNE")):
                # There won't be any corpus backups for these jobs. Skip.
                continue

            corpus_backup_bucket_name = job_environment.get(
                "BACKUP_BUCKET", default_backup_bucket)
            if not corpus_backup_bucket_name:
                # No backup bucket found. Skip.
                continue

            corpus_fuzzer_name_override = job_environment.get(
                "CORPUS_FUZZER_NAME_OVERRIDE")

            target_jobs = list(
                fuzz_target_utils.get_fuzz_target_jobs(job=job.name))
            fuzz_targets = fuzz_target_utils.get_fuzz_targets_for_target_jobs(
                target_jobs)

            for target in fuzz_targets:
                if not target:
                    # This is expected if any fuzzer/job combinations become outdated.
                    continue

                _make_corpus_backup_public(target, corpus_fuzzer_name_override,
                                           corpus_backup_bucket_name)
Example #5
def get_jobs_and_platforms_for_project():
  """Return a map of projects to jobs and platforms map to use for picking top
  crashes."""
  all_jobs = ndb_utils.get_all_from_model(data_types.Job)
  projects_to_jobs_and_platforms = {}
  for job in all_jobs:
    job_environment = job.get_environment()

    # Skip experimental jobs.
    if utils.string_is_true(job_environment.get('EXPERIMENTAL')):
      continue

    # Skip custom binary jobs.
    if (utils.string_is_true(job_environment.get('CUSTOM_BINARY')) or
        job_environment.get('SYSTEM_BINARY_DIR')):
      continue

    # Skip if explicitly excluded using flag.
    if utils.string_is_true(job_environment.get('EXCLUDE_FROM_TOP_CRASHES')):
      continue

    if job.project not in projects_to_jobs_and_platforms:
      projects_to_jobs_and_platforms[job.project] = ProjectMap(set(), set())

    projects_to_jobs_and_platforms[job.project].jobs.add(job.name)
    projects_to_jobs_and_platforms[job.project].platforms.add(
        job_platform_to_real_platform(job.platform))

  return projects_to_jobs_and_platforms
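ProjectMap is referenced but never defined in this snippet; its usage (attribute access on .jobs and .platforms) suggests a namedtuple. A sketch under that assumption:

# Presumed container for the per-project (jobs, platforms) pair used above.
import collections
ProjectMap = collections.namedtuple('ProjectMap', ['jobs', 'platforms'])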
Example #6
def create_variant_tasks_if_needed(testcase):
    """Creates a variant task if needed."""
    if testcase.duplicate_of:
        # If another testcase exists with the same params, no need to spend
        # cycles on calculating variants again.
        return

    testcase_id = testcase.key.id()
    project = data_handler.get_project_name(testcase.job_type)
    jobs = data_types.Job.query(data_types.Job.project == project)
    for job in jobs:
        # The variant needs to be tested in a different job type than ours.
        job_type = job.name
        if testcase.job_type == job_type:
            continue

        # Don't try to reproduce engine fuzzer testcase with blackbox fuzzer
        # testcases and vice versa.
        if (environment.is_engine_fuzzer_job(testcase.job_type) !=
                environment.is_engine_fuzzer_job(job_type)):
            continue

        # Skip experimental jobs.
        job_environment = job.get_environment()
        if utils.string_is_true(job_environment.get('EXPERIMENTAL')):
            continue

        queue = tasks.queue_for_platform(job.platform)
        tasks.add_task('variant', testcase_id, job_type, queue)

        variant = data_handler.get_testcase_variant(testcase_id, job_type)
        variant.status = data_types.TestcaseVariantStatus.PENDING
        variant.put()
Example #7
def update_fuzzer_jobs(fuzzer_entities, project_names):
  """Update fuzzer job mappings."""
  to_delete = []

  for job in data_types.Job.query():
    if not job.environment_string:
      continue

    job_environment = job.get_environment()
    if not utils.string_is_true(job_environment.get('MANAGED', 'False')):
      continue

    job_project = job_environment['PROJECT_NAME']
    if job_project in project_names:
      continue

    logs.log('Deleting job %s' % job.name)
    to_delete.append(job.key)
    for fuzzer_entity in fuzzer_entities:
      try:
        fuzzer_entity.jobs.remove(job.name)
      except ValueError:
        pass

  for fuzzer_entity in fuzzer_entities:
    fuzzer_entity.put()
    fuzzer_selection.update_mappings_for_fuzzer(fuzzer_entity)

  if to_delete:
    ndb_utils.delete_multi(to_delete)
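A hypothetical invocation of update_fuzzer_jobs; the fuzzer and project names below are made up for illustration and are not part of the example:

# Prune managed jobs whose PROJECT_NAME no longer appears among the active
# projects. 'libFuzzer', 'libxml2' and 'zlib' are placeholder names.
fuzzer = data_types.Fuzzer.query(data_types.Fuzzer.name == 'libFuzzer').get()
update_fuzzer_jobs([fuzzer], project_names={'libxml2', 'zlib'})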
Example #8
def _get_project_results_for_jobs(jobs):
  """Return projects for jobs."""
  projects = {}
  for job in sorted(jobs, key=lambda j: j.name):
    project_name = job.get_environment().get('PROJECT_NAME', job.name)
    if project_name not in projects:
      projects[project_name] = {'name': project_name, 'jobs': []}

    if utils.string_is_true(job.get_environment().get('CORPUS_PRUNE')):
      projects[project_name]['coverage_job'] = job.name

    engine_display_name, engine_name = _get_engine_names(job.name)
    projects[project_name]['jobs'].append({
        'engine_display_name':
            engine_display_name,
        'engine_name':
            engine_name,
        'sanitizer_string':
            environment.get_memory_tool_display_string(job.name),
        'name':
            job.name,
        'single_target':
            get_single_fuzz_target_or_none(project_name, engine_name),
        'has_stats':
            True
    })

  projects = list(projects.values())
  projects.sort(key=_sort_by_name)
  for project in projects:
    project['jobs'].sort(key=_sort_by_name)

  return projects
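The _sort_by_name helper is not shown in this example; since it sorts dicts that carry a 'name' entry, it presumably just keys on that field. A sketch under that assumption:

# Presumed sort key for the project and job dicts built above.
def _sort_by_name(item):
    return item['name']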
Example #9
def create_variant_tasks_if_needed(testcase):
    """Creates a variant task if needed."""
    testcase_id = testcase.key.id()
    jobs = ndb_utils.get_all_from_model(data_types.Job)
    for job in jobs:
        # The variant needs to be tested in a different job type than ours.
        job_type = job.name
        if testcase.job_type == job_type:
            continue

        # Don't try to reproduce engine fuzzer testcase with blackbox fuzzer
        # testcases and vice versa.
        if (environment.is_engine_fuzzer_job(testcase.job_type) !=
                environment.is_engine_fuzzer_job(job_type)):
            continue

        # Skip experimental jobs.
        job_environment = job.get_environment()
        if utils.string_is_true(job_environment.get('EXPERIMENTAL')):
            continue

        # Don't look for variants in other projects.
        project_name = data_handler.get_project_name(job_type)
        if testcase.project_name != project_name:
            continue

        queue = tasks.queue_for_platform(job.platform)
        tasks.add_task('variant', testcase_id, job_type, queue)

        variant = data_handler.get_testcase_variant(testcase_id, job_type)
        variant.status = data_types.TestcaseVariantStatus.PENDING
        variant.put()
Example #10
def _get_excluded_jobs():
    """Return list of jobs excluded from bug filing."""
    excluded_jobs = []

    jobs = ndb_utils.get_all_from_model(data_types.Job)
    for job in jobs:
        job_environment = job.get_environment()

        # Exclude experimental jobs.
        if utils.string_is_true(job_environment.get('EXPERIMENTAL')):
            excluded_jobs.append(job.name)

        # Exclude custom binary jobs.
        elif (utils.string_is_true(job_environment.get('CUSTOM_BINARY'))
              or job_environment.get('SYSTEM_BINARY_DIR')):
            excluded_jobs.append(job.name)

    return excluded_jobs
Example #11
def get_tasks_to_schedule():
  """Return (task_target, job_name, queue_name) arguments to schedule a task."""
  for job in data_types.Job.query():
    if not utils.string_is_true(job.get_environment().get('CORPUS_PRUNE')):
      continue

    queue_name = tasks.queue_for_job(job.name)
    for target_job in fuzz_target_utils.get_fuzz_target_jobs(job=job.name):
      task_target = target_job.fuzz_target_name
      yield (task_target, job.name, queue_name)
Example #12
def get_tasks_to_schedule():
    """Return (task_target, job_name, queue_name) arguments to schedule a task."""
    for job in data_types.Job.query():
        if not utils.string_is_true(job.get_environment().get('CORPUS_PRUNE')):
            continue

        if utils.string_is_true(job.get_environment().get('CUSTOM_BINARY')):
            # Custom binary jobs do not have revisions.
            latest_revision = None
        else:
            latest_revision = _get_latest_job_revision(job)
            if not latest_revision:
                continue

        queue_name = tasks.queue_for_job(job.name)
        for target_job in fuzz_target_utils.get_fuzz_target_jobs(job=job.name):
            task_target = target_job.fuzz_target_name
            if latest_revision:
                task_target += '@%s' % latest_revision

            yield (task_target, job.name, queue_name)
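A hypothetical consumer of this generator (and of the simpler one in Example #11); the 'fuzz' task name below is a placeholder, not taken from the examples:

# Illustrative only: schedule each yielded task, splitting the optional
# '@revision' suffix back out of the target name.
for task_target, job_name, queue_name in get_tasks_to_schedule():
    target_name, _, revision = task_target.partition('@')
    tasks.add_task('fuzz', task_target, job_name, queue=queue_name)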
Example #13
def _get_revision_vars_url_format(job_type):
    """Return REVISION_VARS_URL from job environment if available. Otherwise,
  default to one set in project.yaml. For custom binary jobs, this is not
  applicable."""
    if job_type is None:
        # Force it to use env attribute in project.yaml.
        return local_config.ProjectConfig().get('env.REVISION_VARS_URL')

    custom_binary = data_handler.get_value_from_job_definition(
        job_type, 'CUSTOM_BINARY')
    if utils.string_is_true(custom_binary):
        return None

    return data_handler.get_value_from_job_definition_or_environment(
        job_type, 'REVISION_VARS_URL')
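Two illustrative calls; 'libfuzzer_asan_libxml' is a made-up job name, not from the example:

# Passing None forces the env attribute from project.yaml; passing a job
# name reads REVISION_VARS_URL from that job's definition or environment.
default_format = _get_revision_vars_url_format(None)
job_format = _get_revision_vars_url_format('libfuzzer_asan_libxml')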
Example #14
  def get(self):
    """Handle a GET request."""
    for job in data_types.Job.query():
      if not utils.string_is_true(
          job.get_environment().get('USE_CORPUS_FOR_ML')):
        continue

      target_jobs = list(fuzz_target_utils.get_fuzz_target_jobs(job=job.name))
      fuzz_targets = fuzz_target_utils.get_fuzz_targets_for_target_jobs(
          target_jobs)

      for target in fuzz_targets:
        tasks.add_task(
            'ml_train',
            target.project_qualified_name(),
            job.name,
            queue=tasks.ML_JOBS_TASKQUEUE)
Example #15
    def do_post(self):
        """Upload a testcase."""
        email = helpers.get_user_email()
        testcase_id = self.request.get("testcaseId")
        uploaded_file = self.get_upload()
        if testcase_id and not uploaded_file:
            testcase = helpers.get_testcase(testcase_id)
            if not access.can_user_access_testcase(testcase):
                raise helpers.AccessDeniedException()

            # Use minimized testcase for upload (if available).
            key = (testcase.minimized_keys if testcase.minimized_keys
                   and testcase.minimized_keys != "NA" else
                   testcase.fuzzed_keys)

            uploaded_file = blobs.get_blob_info(key)

            # Extract filename part from blob.
            uploaded_file.filename = os.path.basename(
                uploaded_file.filename.replace("\\", os.sep))

        job_type = self.request.get("job")
        if not job_type:
            raise helpers.EarlyExitException("Missing job name.", 400)

        if (not data_types.Job.VALID_NAME_REGEX.match(job_type)
                or not data_types.Job.query(
                    data_types.Job.name == job_type).get()):
            raise helpers.EarlyExitException("Invalid job name.", 400)

        fuzzer_name = ""
        job_type_lowercase = job_type.lower()
        if "libfuzzer" in job_type_lowercase:
            fuzzer_name = "libFuzzer"
        elif "afl" in job_type_lowercase:
            fuzzer_name = "afl"

        target_name = self.request.get("target")
        if not fuzzer_name and target_name:
            raise helpers.EarlyExitException(
                "Target name is not applicable to non-engine jobs (AFL, libFuzzer).",
                400,
            )

        if fuzzer_name and not target_name:
            raise helpers.EarlyExitException(
                "Missing target name for engine job (AFL, libFuzzer).", 400)

        if target_name and not data_types.Fuzzer.VALID_NAME_REGEX.match(
                target_name):
            raise helpers.EarlyExitException("Invalid target name.", 400)

        fully_qualified_fuzzer_name = ""
        if fuzzer_name and target_name:
            fully_qualified_fuzzer_name, target_name = find_fuzz_target(
                fuzzer_name, target_name, job_type)
            if not fully_qualified_fuzzer_name:
                raise helpers.EarlyExitException("Target does not exist.", 400)

        if not access.has_access(
                need_privileged_access=False,
                job_type=job_type,
                fuzzer_name=(fully_qualified_fuzzer_name or fuzzer_name),
        ) and not _is_uploader_allowed(email):
            raise helpers.AccessDeniedException()

        multiple_testcases = bool(self.request.get("multiple"))
        http_flag = bool(self.request.get("http"))
        high_end_job = bool(self.request.get("highEnd"))
        bug_information = self.request.get("issue")
        crash_revision = self.request.get("revision")
        timeout = self.request.get("timeout")
        retries = self.request.get("retries")
        bug_summary_update_flag = bool(self.request.get("updateIssue"))
        quiet_flag = bool(self.request.get("quiet"))
        additional_arguments = self.request.get("args")
        app_launch_command = self.request.get("cmd")
        platform_id = self.request.get("platform")
        issue_labels = self.request.get("issue_labels")
        gestures = self.request.get("gestures") or "[]"

        testcase_metadata = self.request.get("metadata", {})
        if testcase_metadata:
            try:
                testcase_metadata = json.loads(testcase_metadata)
            except Exception:
                raise helpers.EarlyExitException("Invalid metadata JSON.", 400)
            if not isinstance(testcase_metadata, dict):
                raise helpers.EarlyExitException(
                    "Metadata is not a JSON object.", 400)
        if issue_labels:
            testcase_metadata["issue_labels"] = issue_labels

        try:
            gestures = ast.literal_eval(gestures)
        except Exception:
            raise helpers.EarlyExitException("Failed to parse gestures.", 400)

        archive_state = 0
        bundled = False
        file_path_input = ""

        # Certain modifications such as app launch command, issue updates are only
        # allowed for privileged users.
        privileged_user = access.has_access(need_privileged_access=True)
        if not privileged_user:
            if bug_information or bug_summary_update_flag:
                raise helpers.EarlyExitException(
                    "You are not privileged to update existing issues.", 400)

            need_privileged_access = utils.string_is_true(
                data_handler.get_value_from_job_definition(
                    job_type, "PRIVILEGED_ACCESS"))
            if need_privileged_access:
                raise helpers.EarlyExitException(
                    "You are not privileged to run this job type.", 400)

            if app_launch_command:
                raise helpers.EarlyExitException(
                    "You are not privileged to run arbitrary launch commands.",
                    400)

            if testcase_metadata and not _allow_unprivileged_metadata(
                    testcase_metadata):
                raise helpers.EarlyExitException(
                    "You are not privileged to set testcase metadata.", 400)

            if additional_arguments:
                raise helpers.EarlyExitException(
                    "You are not privileged to add command-line arguments.",
                    400)

            if gestures:
                raise helpers.EarlyExitException(
                    "You are not privileged to run arbitrary gestures.", 400)

        # TODO(aarya): Remove once AFL is migrated to engine pipeline.
        if target_name:
            additional_arguments = "%TESTCASE%"

        if crash_revision and crash_revision.isdigit():
            crash_revision = int(crash_revision)
        else:
            crash_revision = 0

        if bug_information == "0":  # Auto-recover from this bad input.
            bug_information = None
        if bug_information and not bug_information.isdigit():
            raise helpers.EarlyExitException("Bug is not a number.", 400)

        if not timeout:
            timeout = 0
        elif not timeout.isdigit() or timeout == "0":
            raise helpers.EarlyExitException(
                "Testcase timeout must be a number greater than 0.", 400)
        else:
            timeout = int(timeout)
            if timeout > 120:
                raise helpers.EarlyExitException(
                    "Testcase timeout may not be greater than 120 seconds.",
                    400)

        if retries:
            if retries.isdigit():
                retries = int(retries)
            else:
                retries = None

            if retries is None or retries > MAX_RETRIES:
                raise helpers.EarlyExitException(
                    "Testcase retries must be a number less than %d." %
                    MAX_RETRIES, 400)
        else:
            retries = None

        job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

        if uploaded_file is not None:
            filename = "".join(x for x in uploaded_file.filename
                               if x not in " ;/?:@&=+$,{}|<>()\\")
            key = str(uploaded_file.key())
            if archive.is_archive(filename):
                archive_state = data_types.ArchiveStatus.FUZZED
            if archive_state:
                if multiple_testcases:
                    # Create a job to unpack an archive.
                    metadata = data_types.BundledArchiveMetadata()
                    metadata.blobstore_key = key
                    metadata.timeout = timeout
                    metadata.job_queue = job_queue
                    metadata.job_type = job_type
                    metadata.http_flag = http_flag
                    metadata.archive_filename = filename
                    metadata.uploader_email = email
                    metadata.gestures = gestures
                    metadata.crash_revision = crash_revision
                    metadata.additional_arguments = additional_arguments
                    metadata.bug_information = bug_information
                    metadata.platform_id = platform_id
                    metadata.app_launch_command = app_launch_command
                    metadata.fuzzer_name = fuzzer_name
                    metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
                    metadata.fuzzer_binary_name = target_name
                    metadata.put()

                    tasks.add_task(
                        "unpack",
                        str(metadata.key.id()),
                        job_type,
                        queue=tasks.queue_for_job(job_type),
                    )

                    # Create a testcase metadata object to show the user their upload.
                    upload_metadata = data_types.TestcaseUploadMetadata()
                    upload_metadata.timestamp = datetime.datetime.utcnow()
                    upload_metadata.filename = filename
                    upload_metadata.blobstore_key = key
                    upload_metadata.original_blobstore_key = key
                    upload_metadata.status = "Pending"
                    upload_metadata.bundled = True
                    upload_metadata.uploader_email = email
                    upload_metadata.retries = retries
                    upload_metadata.bug_summary_update_flag = bug_summary_update_flag
                    upload_metadata.quiet_flag = quiet_flag
                    upload_metadata.additional_metadata_string = json.dumps(
                        testcase_metadata)
                    upload_metadata.put()

                    helpers.log("Uploaded multiple testcases.",
                                helpers.VIEW_OPERATION)
                    self.render_json({"multiple": True})
                    return

                file_path_input = guess_input_file(uploaded_file, filename)
                if not file_path_input:
                    raise helpers.EarlyExitException(
                        ("Unable to detect which file to launch. The main file's name "
                         "must contain either of %s." %
                         str(RUN_FILE_PATTERNS)),
                        400,
                    )

        else:
            raise helpers.EarlyExitException("Please select a file to upload.",
                                             400)

        testcase_id = data_handler.create_user_uploaded_testcase(
            key,
            key,
            archive_state,
            filename,
            file_path_input,
            timeout,
            job_type,
            job_queue,
            http_flag,
            gestures,
            additional_arguments,
            bug_information,
            crash_revision,
            email,
            platform_id,
            app_launch_command,
            fuzzer_name,
            fully_qualified_fuzzer_name,
            target_name,
            bundled,
            retries,
            bug_summary_update_flag,
            quiet_flag,
            additional_metadata=testcase_metadata,
        )

        if not quiet_flag:
            testcase = data_handler.get_testcase_by_id(testcase_id)
            issue = issue_tracker_utils.get_issue_for_testcase(testcase)
            if issue:
                report_url = data_handler.TESTCASE_REPORT_URL.format(
                    domain=data_handler.get_domain(), testcase_id=testcase_id)

                comment = ("ClusterFuzz is analyzing your testcase. "
                           "Developers can follow the progress at %s." %
                           report_url)
                issue.save(new_comment=comment)

        helpers.log("Uploaded testcase %s" % testcase_id,
                    helpers.VIEW_OPERATION)
        self.render_json({"id": "%s" % testcase_id})
Example #16
    def do_post(self):
        """Upload a testcase."""
        email = helpers.get_user_email()
        testcase_id = request.get('testcaseId')
        uploaded_file = self.get_upload()
        if testcase_id and not uploaded_file:
            testcase = helpers.get_testcase(testcase_id)
            if not access.can_user_access_testcase(testcase):
                raise helpers.AccessDeniedException()

            # Use minimized testcase for upload (if available).
            key = (testcase.minimized_keys if testcase.minimized_keys
                   and testcase.minimized_keys != 'NA' else
                   testcase.fuzzed_keys)

            uploaded_file = blobs.get_blob_info(key)

            # Extract filename part from blob.
            uploaded_file.filename = os.path.basename(
                uploaded_file.filename.replace('\\', os.sep))

        job_type = request.get('job')
        if not job_type:
            raise helpers.EarlyExitException('Missing job name.', 400)

        job = data_types.Job.query(data_types.Job.name == job_type).get()
        if not job:
            raise helpers.EarlyExitException('Invalid job name.', 400)

        fuzzer_name = request.get('fuzzer')
        job_type_lowercase = job_type.lower()
        if 'libfuzzer' in job_type_lowercase:
            fuzzer_name = 'libFuzzer'
        elif 'afl' in job_type_lowercase:
            fuzzer_name = 'afl'
        elif 'honggfuzz' in job_type_lowercase:
            fuzzer_name = 'honggfuzz'

        is_engine_job = fuzzer_name and environment.is_engine_fuzzer_job(
            job_type)
        target_name = request.get('target')
        if not is_engine_job and target_name:
            raise helpers.EarlyExitException(
                'Target name is not applicable to non-engine jobs (AFL, libFuzzer).',
                400)

        if is_engine_job and not target_name:
            raise helpers.EarlyExitException(
                'Missing target name for engine job (AFL, libFuzzer).', 400)

        if (target_name
                and not data_types.Fuzzer.VALID_NAME_REGEX.match(target_name)):
            raise helpers.EarlyExitException('Invalid target name.', 400)

        fully_qualified_fuzzer_name = ''
        if is_engine_job and target_name:
            if job.is_external():
                # External jobs don't run and set FuzzTarget entities as part of
                # fuzz_task. Set it here instead.
                fuzz_target = data_handler.record_fuzz_target(
                    fuzzer_name, target_name, job_type)
                fully_qualified_fuzzer_name = fuzz_target.fully_qualified_name()
                target_name = fuzz_target.binary
            else:
                fully_qualified_fuzzer_name, target_name = find_fuzz_target(
                    fuzzer_name, target_name, job_type)

        if (not access.has_access(need_privileged_access=False,
                                  job_type=job_type,
                                  fuzzer_name=(fully_qualified_fuzzer_name
                                               or fuzzer_name))
                and not _is_uploader_allowed(email)):
            raise helpers.AccessDeniedException()

        multiple_testcases = bool(request.get('multiple'))
        http_flag = bool(request.get('http'))
        high_end_job = bool(request.get('highEnd'))
        bug_information = request.get('issue')
        crash_revision = request.get('revision')
        timeout = request.get('timeout')
        retries = request.get('retries')
        bug_summary_update_flag = bool(request.get('updateIssue'))
        quiet_flag = bool(request.get('quiet'))
        additional_arguments = request.get('args')
        app_launch_command = request.get('cmd')
        platform_id = request.get('platform')
        issue_labels = request.get('issue_labels')
        gestures = request.get('gestures') or '[]'
        stacktrace = request.get('stacktrace')

        crash_data = None
        if job.is_external():
            if not stacktrace:
                raise helpers.EarlyExitException(
                    'Stacktrace required for external jobs.', 400)

            if not crash_revision:
                raise helpers.EarlyExitException(
                    'Revision required for external jobs.', 400)

            crash_data = stack_analyzer.get_crash_data(
                stacktrace,
                fuzz_target=target_name,
                symbolize_flag=False,
                already_symbolized=True,
                detect_ooms_and_hangs=True)
        elif stacktrace:
            raise helpers.EarlyExitException(
                'Should not specify stacktrace for non-external jobs.', 400)

        testcase_metadata = request.get('metadata', {})
        if testcase_metadata:
            try:
                testcase_metadata = json.loads(testcase_metadata)
            except Exception as e:
                raise helpers.EarlyExitException('Invalid metadata JSON.',
                                                 400) from e
            if not isinstance(testcase_metadata, dict):
                raise helpers.EarlyExitException(
                    'Metadata is not a JSON object.', 400)
        if issue_labels:
            testcase_metadata['issue_labels'] = issue_labels

        try:
            gestures = ast.literal_eval(gestures)
        except Exception as e:
            raise helpers.EarlyExitException('Failed to parse gestures.',
                                             400) from e

        archive_state = 0
        bundled = False
        file_path_input = ''

        # Certain modifications such as app launch command, issue updates are only
        # allowed for privileged users.
        privileged_user = access.has_access(need_privileged_access=True)
        if not privileged_user:
            if bug_information or bug_summary_update_flag:
                raise helpers.EarlyExitException(
                    'You are not privileged to update existing issues.', 400)

            need_privileged_access = utils.string_is_true(
                data_handler.get_value_from_job_definition(
                    job_type, 'PRIVILEGED_ACCESS'))
            if need_privileged_access:
                raise helpers.EarlyExitException(
                    'You are not privileged to run this job type.', 400)

            if app_launch_command:
                raise helpers.EarlyExitException(
                    'You are not privileged to run arbitrary launch commands.',
                    400)

            if (testcase_metadata
                    and not _allow_unprivileged_metadata(testcase_metadata)):
                raise helpers.EarlyExitException(
                    'You are not privileged to set testcase metadata.', 400)

            if additional_arguments:
                raise helpers.EarlyExitException(
                    'You are not privileged to add command-line arguments.',
                    400)

            if gestures:
                raise helpers.EarlyExitException(
                    'You are not privileged to run arbitrary gestures.', 400)

        # TODO(aarya): Remove once AFL is migrated to engine pipeline.
        if target_name:
            additional_arguments = '%TESTCASE%'

        if crash_revision and crash_revision.isdigit():
            crash_revision = int(crash_revision)
        else:
            crash_revision = 0

        if bug_information == '0':  # Auto-recover from this bad input.
            bug_information = None
        if bug_information and not bug_information.isdigit():
            raise helpers.EarlyExitException('Bug is not a number.', 400)

        if not timeout:
            timeout = 0
        elif not timeout.isdigit() or timeout == '0':
            raise helpers.EarlyExitException(
                'Testcase timeout must be a number greater than 0.', 400)
        else:
            timeout = int(timeout)
            if timeout > 120:
                raise helpers.EarlyExitException(
                    'Testcase timeout may not be greater than 120 seconds.',
                    400)

        if retries:
            if retries.isdigit():
                retries = int(retries)
            else:
                retries = None

            if retries is None or retries > MAX_RETRIES:
                raise helpers.EarlyExitException(
                    'Testcase retries must be a number less than %d.' %
                    MAX_RETRIES, 400)
        else:
            retries = None

        job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

        if uploaded_file is not None:
            filename = ''.join([
                x for x in uploaded_file.filename
                if x not in ' ;/?:@&=+$,{}|<>()\\'
            ])
            key = str(uploaded_file.key())
            if archive.is_archive(filename):
                archive_state = data_types.ArchiveStatus.FUZZED
            if archive_state:
                if multiple_testcases:
                    # Create a job to unpack an archive.
                    metadata = data_types.BundledArchiveMetadata()
                    metadata.blobstore_key = key
                    metadata.timeout = timeout
                    metadata.job_queue = job_queue
                    metadata.job_type = job_type
                    metadata.http_flag = http_flag
                    metadata.archive_filename = filename
                    metadata.uploader_email = email
                    metadata.gestures = gestures
                    metadata.crash_revision = crash_revision
                    metadata.additional_arguments = additional_arguments
                    metadata.bug_information = bug_information
                    metadata.platform_id = platform_id
                    metadata.app_launch_command = app_launch_command
                    metadata.fuzzer_name = fuzzer_name
                    metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
                    metadata.fuzzer_binary_name = target_name
                    metadata.put()

                    tasks.add_task('unpack',
                                   str(metadata.key.id()),
                                   job_type,
                                   queue=tasks.queue_for_job(job_type))

                    # Create a testcase metadata object to show the user their upload.
                    upload_metadata = data_types.TestcaseUploadMetadata()
                    upload_metadata.timestamp = datetime.datetime.utcnow()
                    upload_metadata.filename = filename
                    upload_metadata.blobstore_key = key
                    upload_metadata.original_blobstore_key = key
                    upload_metadata.status = 'Pending'
                    upload_metadata.bundled = True
                    upload_metadata.uploader_email = email
                    upload_metadata.retries = retries
                    upload_metadata.bug_summary_update_flag = bug_summary_update_flag
                    upload_metadata.quiet_flag = quiet_flag
                    upload_metadata.additional_metadata_string = json.dumps(
                        testcase_metadata)
                    upload_metadata.bug_information = bug_information
                    upload_metadata.put()

                    helpers.log('Uploaded multiple testcases.',
                                helpers.VIEW_OPERATION)
                    return self.render_json({'multiple': True})

                file_path_input = guess_input_file(uploaded_file, filename)
                if not file_path_input:
                    raise helpers.EarlyExitException((
                        "Unable to detect which file to launch. The main file\'s name "
                        'must contain either of %s.' % str(RUN_FILE_PATTERNS)),
                                                     400)

        else:
            raise helpers.EarlyExitException('Please select a file to upload.',
                                             400)

        testcase_id = data_handler.create_user_uploaded_testcase(
            key,
            key,
            archive_state,
            filename,
            file_path_input,
            timeout,
            job,
            job_queue,
            http_flag,
            gestures,
            additional_arguments,
            bug_information,
            crash_revision,
            email,
            platform_id,
            app_launch_command,
            fuzzer_name,
            fully_qualified_fuzzer_name,
            target_name,
            bundled,
            retries,
            bug_summary_update_flag,
            quiet_flag,
            additional_metadata=testcase_metadata,
            crash_data=crash_data)

        if not quiet_flag:
            testcase = data_handler.get_testcase_by_id(testcase_id)
            issue = issue_tracker_utils.get_issue_for_testcase(testcase)
            if issue:
                report_url = data_handler.TESTCASE_REPORT_URL.format(
                    domain=data_handler.get_domain(), testcase_id=testcase_id)

                comment = ('ClusterFuzz is analyzing your testcase. '
                           'Developers can follow the progress at %s.' %
                           report_url)
                issue.save(new_comment=comment)

        helpers.log('Uploaded testcase %s' % testcase_id,
                    helpers.VIEW_OPERATION)
        return self.render_json({'id': '%s' % testcase_id})
Example #17
    def do_post(self):
        """Upload a testcase."""
        testcase_id = self.request.get('testcaseId')
        uploaded_file = self.get_upload()
        if testcase_id and not uploaded_file:
            testcase = helpers.get_testcase(testcase_id)
            if not access.can_user_access_testcase(testcase):
                raise helpers.AccessDeniedException()

            # Use minimized testcase for upload (if available).
            key = (testcase.minimized_keys if testcase.minimized_keys
                   and testcase.minimized_keys != 'NA' else
                   testcase.fuzzed_keys)

            uploaded_file = blobs.get_blob_info(key)

            # Extract filename part from blob.
            uploaded_file.filename = os.path.basename(
                uploaded_file.filename.replace('\\', os.sep))

        job_type = self.request.get('job')
        if not job_type:
            raise helpers.EarlyExitException('Missing job name.', 400)

        if (not data_types.Job.VALID_NAME_REGEX.match(job_type)
                or not data_types.Job.query(
                    data_types.Job.name == job_type).get()):
            raise helpers.EarlyExitException('Invalid job name.', 400)

        fuzzer_name = ''
        job_type_lowercase = job_type.lower()
        if 'libfuzzer' in job_type_lowercase:
            fuzzer_name = 'libFuzzer'
        elif 'afl' in job_type_lowercase:
            fuzzer_name = 'afl'

        target_name = self.request.get('target')
        if not fuzzer_name and target_name:
            raise helpers.EarlyExitException(
                'Target name is not applicable to non-engine jobs (AFL, libFuzzer).',
                400)

        if fuzzer_name and not target_name:
            raise helpers.EarlyExitException(
                'Missing target name for engine job (AFL, libFuzzer).', 400)

        if (target_name
                and not data_types.Fuzzer.VALID_NAME_REGEX.match(target_name)):
            raise helpers.EarlyExitException('Invalid target name.', 400)

        fully_qualified_fuzzer_name = ''
        if fuzzer_name and target_name:
            fully_qualified_fuzzer_name, target_name = find_fuzz_target(
                fuzzer_name, target_name, job_type)
            if not fully_qualified_fuzzer_name:
                raise helpers.EarlyExitException('Target does not exist.', 400)

        if not access.has_access(need_privileged_access=False,
                                 job_type=job_type,
                                 fuzzer_name=(fully_qualified_fuzzer_name
                                              or fuzzer_name)):
            raise helpers.AccessDeniedException()

        multiple_testcases = bool(self.request.get('multiple'))
        http_flag = bool(self.request.get('http'))
        high_end_job = bool(self.request.get('highEnd'))
        bug_information = self.request.get('issue')
        crash_revision = self.request.get('revision')
        timeout = self.request.get('timeout')
        retries = self.request.get('retries')
        bug_summary_update_flag = bool(self.request.get('updateIssue'))
        additional_arguments = self.request.get('args')
        app_launch_command = self.request.get('cmd')
        platform_id = self.request.get('platform')

        testcase_metadata = self.request.get('metadata')
        if testcase_metadata:
            try:
                testcase_metadata = json.loads(testcase_metadata)
            except Exception:
                raise helpers.EarlyExitException('Invalid metadata JSON.', 400)
            if not isinstance(testcase_metadata, dict):
                raise helpers.EarlyExitException(
                    'Metadata is not a JSON object.', 400)

        archive_state = 0
        bundled = False
        file_path_input = ''
        email = helpers.get_user_email()

        # If we have an AFL or libFuzzer target, use that for arguments.
        # Launch command looks like
        # python launcher.py {testcase_path} {target_name}
        if target_name:
            additional_arguments = '%%TESTCASE%% %s' % target_name

        # Certain modifications such as app launch command, issue updates are only
        # allowed for privileged users.
        privileged_user = access.has_access(need_privileged_access=True)
        if not privileged_user:
            if bug_information or bug_summary_update_flag:
                raise helpers.EarlyExitException(
                    'You are not privileged to update existing issues.', 400)

            need_privileged_access = utils.string_is_true(
                data_handler.get_value_from_job_definition(
                    job_type, 'PRIVILEGED_ACCESS'))
            if need_privileged_access:
                raise helpers.EarlyExitException(
                    'You are not privileged to run this job type.', 400)

            if app_launch_command:
                raise helpers.EarlyExitException(
                    'You are not privileged to run arbitrary launch commands.',
                    400)

            if testcase_metadata:
                raise helpers.EarlyExitException(
                    'You are not privileged to set testcase metadata.', 400)

        if crash_revision and crash_revision.isdigit():
            crash_revision = int(crash_revision)
        else:
            crash_revision = 0

        if bug_information and not bug_information.isdigit():
            raise helpers.EarlyExitException('Bug is not a number.', 400)

        if not timeout:
            timeout = 0
        elif not timeout.isdigit() or timeout == '0':
            raise helpers.EarlyExitException(
                'Testcase timeout must be a number greater than 0.', 400)
        else:
            timeout = int(timeout)
            if timeout > 120:
                raise helpers.EarlyExitException(
                    'Testcase timeout may not be greater than 120 seconds.',
                    400)

        if retries:
            if retries.isdigit():
                retries = int(retries)
            else:
                retries = None

            if retries is None or retries > MAX_RETRIES:
                raise helpers.EarlyExitException(
                    'Testcase retries must be a number less than %d.' %
                    MAX_RETRIES, 400)
        else:
            retries = None

        try:
            gestures = ast.literal_eval(self.request.get('gestures'))
        except Exception:
            gestures = []
        if not gestures:
            gestures = []

        job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

        if uploaded_file is not None:
            filename = ''.join([
                x for x in uploaded_file.filename
                if x not in ' ;/?:@&=+$,{}|<>()\\'
            ])
            key = str(uploaded_file.key())
            if archive.is_archive(filename):
                archive_state = data_types.ArchiveStatus.FUZZED
            if archive_state:
                if multiple_testcases:
                    if testcase_metadata:
                        raise helpers.EarlyExitException(
                            'Testcase metadata not supported with multiple testcases.',
                            400)
                    # Create a job to unpack an archive.
                    metadata = data_types.BundledArchiveMetadata()
                    metadata.blobstore_key = key
                    metadata.timeout = timeout
                    metadata.job_queue = job_queue
                    metadata.job_type = job_type
                    metadata.http_flag = http_flag
                    metadata.archive_filename = filename
                    metadata.uploader_email = email
                    metadata.gestures = gestures
                    metadata.crash_revision = crash_revision
                    metadata.additional_arguments = additional_arguments
                    metadata.bug_information = bug_information
                    metadata.platform_id = platform_id
                    metadata.app_launch_command = app_launch_command
                    metadata.fuzzer_name = fuzzer_name
                    metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
                    metadata.fuzzer_binary_name = target_name
                    metadata.put()

                    tasks.add_task('unpack',
                                   str(metadata.key.id()),
                                   job_type,
                                   queue=tasks.queue_for_job(job_type))

                    # Create a testcase metadata object to show the user their upload.
                    upload_metadata = data_types.TestcaseUploadMetadata()
                    upload_metadata.timestamp = datetime.datetime.utcnow()
                    upload_metadata.filename = filename
                    upload_metadata.blobstore_key = key
                    upload_metadata.original_blobstore_key = key
                    upload_metadata.status = 'Pending'
                    upload_metadata.bundled = True
                    upload_metadata.uploader_email = email
                    upload_metadata.retries = retries
                    upload_metadata.bug_summary_update_flag = bug_summary_update_flag
                    upload_metadata.put()

                    helpers.log('Uploaded multiple testcases.',
                                helpers.VIEW_OPERATION)
                    self.render_json({'multiple': True})
                    return

                file_path_input = guess_input_file(uploaded_file, filename)
                if not file_path_input:
                    raise helpers.EarlyExitException((
                        "Unable to detect which file to launch. The main file\'s name "
                        'must contain either of %s.' % str(RUN_FILE_PATTERNS)),
                                                     400)

        else:
            raise helpers.EarlyExitException('Please select a file to upload.',
                                             400)

        testcase_id = data_handler.create_user_uploaded_testcase(
            key,
            key,
            archive_state,
            filename,
            file_path_input,
            timeout,
            job_type,
            job_queue,
            http_flag,
            gestures,
            additional_arguments,
            bug_information,
            crash_revision,
            email,
            platform_id,
            app_launch_command,
            fuzzer_name,
            fully_qualified_fuzzer_name,
            target_name,
            bundled,
            retries,
            bug_summary_update_flag,
            additional_metadata=testcase_metadata)

        testcase = data_handler.get_testcase_by_id(testcase_id)
        issue = issue_tracker_utils.get_issue_for_testcase(testcase)
        if issue:
            report_url = data_handler.TESTCASE_REPORT_URL.format(
                domain=data_handler.get_domain(), testcase_id=testcase_id)
            comment = ('ClusterFuzz is analyzing your testcase. '
                       'Developers can follow the progress at %s.' %
                       report_url)
            issue.save(new_comment=comment)

        helpers.log('Uploaded testcase %s' % testcase_id,
                    helpers.VIEW_OPERATION)
        self.render_json({'id': '%s' % testcase_id})