Example #1
    def get(self):
        """Handle a get request."""
        fuzzer_logs_bucket = fuzzer_logs.get_bucket()
        fuzzers = list(data_types.Fuzzer.query().order(data_types.Fuzzer.name))
        jobs = data_handler.get_all_job_type_names()
        corpora = [
            bundle.name for bundle in data_types.DataBundle.query().order(
                data_types.DataBundle.name)
        ]

        privileged = access.has_access(need_privileged_access=True)
        # Unprivileged users can't download fuzzers, so hide the download keys.
        if not privileged:
            for fuzzer in fuzzers:
                fuzzer.blobstore_key = ''

        template_values = {
            'privileged': privileged,
            'fuzzers': fuzzers,
            'fuzzerLogsBucket': fuzzer_logs_bucket,
            'fieldValues': {
                'corpora': corpora,
                'jobs': jobs,
                'uploadInfo': gcs.prepare_blob_upload()._asdict(),
                'csrfToken': form.generate_csrf_token(),
            }
        }

        self.render('fuzzers.html', template_values)
Example #2
        def wrapper(self, *args, **kwargs):
            """Wrapper."""
            if not access.has_access(
                    need_privileged_access=need_privileged_access):
                raise helpers.AccessDeniedException()

            return func(self, *args, **kwargs)
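
The wrapper above is the inner function of an access-checking decorator. Below is a minimal sketch of the surrounding decorator factory, assuming ClusterFuzz's libs.access and libs.helpers modules as used throughout these examples; the factory name require_access is hypothetical and not taken from the example:

import functools

from libs import access
from libs import helpers


def require_access(need_privileged_access=False):
    """Hypothetical decorator factory: reject the request unless the caller has access."""

    def decorator(func):

        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            """Wrapper."""
            if not access.has_access(
                    need_privileged_access=need_privileged_access):
                raise helpers.AccessDeniedException()

            return func(self, *args, **kwargs)

        return wrapper

    return decorator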
Example #3
  def get(self):
    """Handle a GET request."""
    # Create a list of externally contributed fuzzers.
    user_email = helpers.get_user_email()

    if access.has_access():
      # User is an internal user of ClusterFuzz (eg: ClusterFuzz developer).
      fuzzers_list = (
          data_handler.get_all_fuzzer_names_including_children(
              include_parents=True))
      jobs_list = data_handler.get_all_job_type_names()
    else:
      # User is an external user of ClusterFuzz (eg: non-Chrome dev who
      # submitted a fuzzer or someone with a project in OSS-Fuzz).
      fuzzers_list = external_users.allowed_fuzzers_for_user(
          user_email, include_from_jobs=True, include_parents=True)

      if not fuzzers_list:
        # User doesn't actually have access to any fuzzers.
        raise helpers.AccessDeniedException()

      jobs_list = external_users.allowed_jobs_for_user(user_email)

    fuzzers_list.sort()
    jobs_list.sort()
    result = {
        'info': {
            'fuzzers': fuzzers_list,
            'jobs': jobs_list,
        }
    }

    self.render('fuzzer-stats.html', result)
Example #4
    def get(self):
        """Get the HTML page."""
        key = self.request.get('key')
        if not key:
            raise helpers.EarlyExitException('No key provided.', 400)

        testcase_id = self.request.get('testcase_id')
        if testcase_id:
            testcase = helpers.get_testcase(testcase_id)
            if not access.can_user_access_testcase(testcase):
                raise helpers.AccessDeniedException()

            if key not in [testcase.fuzzed_keys, testcase.minimized_keys]:
                raise helpers.AccessDeniedException()
        else:
            if not access.has_access():
                raise helpers.AccessDeniedException()

        if blobs.get_blob_size(key) > MAX_ALLOWED_CONTENT_SIZE:
            raise helpers.EarlyExitException(
                'Content exceeds max allowed size.', 400)

        try:
            content = unicode(blobs.read_key(key), errors='replace')
        except Exception:
            raise helpers.EarlyExitException('Failed to read content.', 400)

        line_count = len(content.splitlines())
        size = len(content)
        title = '%s, %s' % (utils.get_line_count_string(line_count),
                            utils.get_size_string(size))

        self.render('viewer.html', {'content': content, 'title': title})
Example #5
  def get(self, resource=None):
    """Handle a get request with resource."""
    testcase = None
    testcase_id = request.args.get('testcase_id')
    if not testcase_id and not resource:
      raise helpers.EarlyExitException('No file requested.', 400)

    if testcase_id:
      try:
        testcase = data_handler.get_testcase_by_id(testcase_id)
      except errors.InvalidTestcaseError:
        raise helpers.EarlyExitException('Invalid testcase.', 400)

      if not resource:
        if testcase.minimized_keys and testcase.minimized_keys != 'NA':
          resource = testcase.minimized_keys
        else:
          resource = testcase.fuzzed_keys

    fuzzer_binary_name = None
    if testcase:
      fuzzer_binary_name = testcase.get_metadata('fuzzer_binary_name')

    resource = str(urllib.parse.unquote(resource))
    blob_info = blobs.get_blob_info(resource)
    if not blob_info:
      raise helpers.EarlyExitException('File does not exist.', 400)

    if (testcase and testcase.fuzzed_keys != blob_info.key() and
        testcase.minimized_keys != blob_info.key()):
      raise helpers.EarlyExitException('Invalid testcase.', 400)

    if (utils.is_oss_fuzz() and testcase and
        self.check_public_testcase(blob_info, testcase)):
      # Public OSS-Fuzz testcase.
      return self._send_blob(
          blob_info,
          testcase.key.id(),
          is_minimized=True,
          fuzzer_binary_name=fuzzer_binary_name)

    is_minimized = testcase and blob_info.key() == testcase.minimized_keys
    if access.has_access():
      # User has general access.
      return self._send_blob(blob_info, testcase_id, is_minimized,
                             fuzzer_binary_name)

    # If this blobstore file is for a testcase, check if the user has access to
    # the testcase.
    if not testcase:
      raise helpers.AccessDeniedException()

    if access.can_user_access_testcase(testcase):
      return self._send_blob(blob_info, testcase_id, is_minimized,
                             fuzzer_binary_name)

    raise helpers.AccessDeniedException()
Example #6
File: show.py Project: ze0r/clusterfuzz
    def get(self, fuzzer_name=None, job_type=None, logs_date=None):
        """Handle a GET request."""
        if not fuzzer_name:
            raise helpers.EarlyExitException('Fuzzer name cannot be empty.',
                                             400)

        if not job_type:
            raise helpers.EarlyExitException('Job type cannot be empty.', 400)

        if not logs_date:
            raise helpers.EarlyExitException('Logs Date cannot be empty.', 400)

        if not access.has_access(fuzzer_name=fuzzer_name, job_type=job_type):
            raise helpers.AccessDeniedException()

        performance_features, date = _get_performance_report_data(
            fuzzer_name, job_type, logs_date)

        performance_data = performance_features.rows

        analyzer = performance_analyzer.LibFuzzerPerformanceAnalyzer()

        # It is possible to break the analysis by requesting outdated stats.
        try:
            total_time = sum(row['actual_duration']
                             for row in performance_data)
            performance_scores, affected_runs_percents, examples = (
                analyzer.analyze_stats(performance_data))
        except (KeyError, TypeError, ValueError) as e:
            logging.error('Exception during performance analysis: %s\n',
                          str(e))
            raise helpers.EarlyExitException(
                'Cannot analyze performance for the requested time period.',
                404)

        # Build performance analysis result.
        performance_issues = analyzer.get_issues(performance_scores,
                                                 affected_runs_percents,
                                                 examples)

        performance_report = performance_analyzer.generate_report(
            performance_issues, fuzzer_name, job_type)

        report = _get_performance_report(fuzzer_name, job_type,
                                         performance_report)

        result = {
            'info': {
                'date': str(date),
                'fuzzer_name': report['fuzzer_name'],
                'fuzzer_runs': performance_features.total_count,
                'job_type': report['job_type'],
                'table_data': _build_rows_and_columns(report),
                'total_time': str(datetime.timedelta(seconds=total_time)),
            }
        }
        return self.render('performance-report.html', result)
Example #7
    def get(self):
        """Handles get request."""
        email = helpers.get_user_email()
        if not email:
            raise helpers.AccessDeniedException()

        is_privileged_or_domain_user = access.has_access(
            need_privileged_access=False)
        if is_privileged_or_domain_user or _is_uploader_allowed(email):
            # Privileged, domain and upload users can see all job and fuzzer names.
            allowed_jobs = data_handler.get_all_job_type_names()
            allowed_fuzzers = data_handler.get_all_fuzzer_names_including_children(
                include_parents=True)
        else:
            # Check if this is an external user with access to certain fuzzers/jobs.
            allowed_jobs = external_users.allowed_jobs_for_user(email)
            allowed_fuzzers = external_users.allowed_fuzzers_for_user(
                email, include_from_jobs=True)

            if not allowed_fuzzers and not allowed_jobs:
                raise helpers.AccessDeniedException()

        has_issue_tracker = bool(data_handler.get_issue_tracker_name())

        result, params = get_result()
        return self.render(
            'upload.html', {
                'fieldValues': {
                    'blackboxFuzzers':
                    filter_blackbox_fuzzers(allowed_fuzzers),
                    'jobs':
                    allowed_jobs,
                    'libfuzzerTargets':
                    filter_target_names(allowed_fuzzers, 'libFuzzer'),
                    'aflTargets':
                    filter_target_names(allowed_fuzzers, 'afl'),
                    'honggfuzzTargets':
                    filter_target_names(allowed_fuzzers, 'honggfuzz'),
                    'isChromium':
                    utils.is_chromium(),
                    'sandboxedJobs':
                    data_types.INTERNAL_SANDBOXED_JOB_TYPES,
                    'csrfToken':
                    form.generate_csrf_token(),
                    'isExternalUser':
                    not is_privileged_or_domain_user,
                    'uploadInfo':
                    gcs.prepare_blob_upload()._asdict(),
                    'hasIssueTracker':
                    has_issue_tracker,
                },
                'params': params,
                'result': result
            })
Example #8
def get_scope():
    """Get the scope object for the user."""
    user_email = helpers.get_user_email()
    is_privileged = access.has_access(need_privileged_access=True)
    everything = (is_privileged or access.has_access())

    # pylint: disable=protected-access
    job_types = external_users._allowed_entities_for_user(
        user_email, data_types.PermissionEntityKind.JOB)

    allowed_job_type = access.get_user_job_type()
    if allowed_job_type:
        job_types.append(allowed_job_type)

    # pylint: disable=protected-access
    fuzzer_names = external_users._allowed_entities_for_user(
        user_email, data_types.PermissionEntityKind.FUZZER)

    return Scope(everything, is_privileged, job_types, fuzzer_names,
                 allowed_job_type)
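
The Scope object constructed above is defined elsewhere; here is a minimal sketch, assuming it is a plain container whose fields mirror the five positional arguments passed to it (the real definition may differ):

import collections

# Hypothetical definition matching the constructor call in get_scope() above.
Scope = collections.namedtuple(
    'Scope',
    ['everything', 'is_privileged', 'job_types', 'fuzzer_names', 'allowed_job_type'])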
Example #9
    def get(self):
        """Handles get request."""
        email = helpers.get_user_email()
        if not email:
            raise helpers.AccessDeniedException()

        is_privileged_or_domain_user = access.has_access(
            need_privileged_access=False)
        if is_privileged_or_domain_user or _is_uploader_allowed(email):
            # Privileged, domain and upload users can see all job and fuzzer names.
            allowed_jobs = data_handler.get_all_job_type_names()
            allowed_fuzzers = data_handler.get_all_fuzzer_names_including_children(
                include_parents=True)
        else:
            # Check if this is an external user with access to certain fuzzers/jobs.
            allowed_jobs = external_users.allowed_jobs_for_user(email)
            allowed_fuzzers = external_users.allowed_fuzzers_for_user(
                email, include_from_jobs=True)

            if not allowed_fuzzers and not allowed_jobs:
                raise helpers.AccessDeniedException()

        has_issue_tracker = bool(data_handler.get_issue_tracker_name())

        result, params = get_result(self)
        self.render(
            "upload.html",
            {
                "fieldValues": {
                    "jobs":
                    allowed_jobs,
                    "libfuzzerTargets":
                    filter_target_names(allowed_fuzzers, "libFuzzer"),
                    "aflTargets":
                    filter_target_names(allowed_fuzzers, "afl"),
                    "isChromium":
                    utils.is_chromium(),
                    "sandboxedJobs":
                    data_types.INTERNAL_SANDBOXED_JOB_TYPES,
                    "csrfToken":
                    form.generate_csrf_token(),
                    "isExternalUser":
                    not is_privileged_or_domain_user,
                    "uploadInfo":
                    gcs.prepare_blob_upload()._asdict(),
                    "hasIssueTracker":
                    has_issue_tracker,
                },
                "params": params,
                "result": result,
            },
        )
Example #10
    def get(self, extra=None):
        """Handle a GET request."""
        if not access.has_access():
            # User is an external user of ClusterFuzz (eg: non-Chrome dev who
            # submitted a fuzzer or someone with a project in OSS-Fuzz).
            user_email = helpers.get_user_email()
            fuzzers_list = external_users.allowed_fuzzers_for_user(
                user_email, include_from_jobs=True, include_parents=True)
            if not fuzzers_list:
                # User doesn't actually have access to any fuzzers.
                raise helpers.AccessDeniedException(
                    "You don't have access to any fuzzers.")

        return self.render('fuzzer-stats.html', {})
Example #11
    def post(self):
        """Mark the testcase as security-related."""
        testcase_id = request.get('testcaseId')
        security = request.get('security')
        severity = request.get('severity')
        testcase = helpers.get_testcase(testcase_id)

        if not access.has_access(fuzzer_name=testcase.actual_fuzzer_name(),
                                 job_type=testcase.job_type,
                                 need_privileged_access=True):
            raise helpers.AccessDeniedException()

        mark(testcase, security, severity)
        return self.render_json(show.get_testcase_detail(testcase))
Example #12
def get_result(this):
    """Get the result."""
    params = {k: v for k, v in this.request.iterparams()}
    page = helpers.cast(
        this.request.get("page") or 1, int, "'page' is not an int.")

    query = datastore_query.Query(data_types.TestcaseUploadMetadata)
    query.order("timestamp", is_desc=True)

    if not access.has_access(need_privileged_access=True):
        query.filter("uploader_email", helpers.get_user_email())
        params["permission"] = {"uploaderEmail": helpers.get_user_email()}

    entities, total_pages, total_items, has_more = query.fetch_page(
        page=page, page_size=PAGE_SIZE, projection=None, more_limit=MORE_LIMIT)

    items = []
    for entity in entities:
        items.append({
            "timestamp":
            utils.utc_datetime_to_timestamp(entity.timestamp),
            "testcaseId":
            entity.testcase_id,
            "uploaderEmail":
            entity.uploader_email,
            "filename":
            entity.filename,
            "bundled":
            entity.bundled,
            "pathInArchive":
            entity.path_in_archive,
            "status":
            entity.status,
        })

    attach_testcases(items)

    result = {
        "hasMore": has_more,
        "items": items,
        "page": page,
        "pageSize": PAGE_SIZE,
        "totalItems": total_items,
        "totalPages": total_pages,
    }
    return result, params
Example #13
def get_result(this):
    """Get the result."""
    params = {k: v for k, v in this.request.iterparams()}
    page = helpers.cast(
        this.request.get('page') or 1, int, "'page' is not an int.")

    query = datastore_query.Query(data_types.TestcaseUploadMetadata)
    query.order('timestamp', is_desc=True)

    if not access.has_access(need_privileged_access=True):
        query.filter('uploader_email', helpers.get_user_email())
        params['permission'] = {'uploaderEmail': helpers.get_user_email()}

    entities, total_pages, total_items, has_more = query.fetch_page(
        page=page, page_size=PAGE_SIZE, projection=None, more_limit=MORE_LIMIT)

    items = []
    for entity in entities:
        items.append({
            'timestamp':
            utils.utc_datetime_to_timestamp(entity.timestamp),
            'testcaseId':
            entity.testcase_id,
            'uploaderEmail':
            entity.uploader_email,
            'filename':
            entity.filename,
            'bundled':
            entity.bundled,
            'pathInArchive':
            entity.path_in_archive,
            'status':
            entity.status
        })

    attach_testcases(items)

    result = {
        'hasMore': has_more,
        'items': items,
        'page': page,
        'pageSize': PAGE_SIZE,
        'totalItems': total_items,
        'totalPages': total_pages,
    }
    return result, params
Example #14
    def _check_user_access_and_get_job_filter(self, fuzzer, job):
        """Check whether the current user has access to stats for the fuzzer or job.
    Returns a job filter that should be applied to the query."""
        access_by_fuzzer_or_job = access.has_access(fuzzer_name=fuzzer,
                                                    job_type=job)
        if access_by_fuzzer_or_job:
            # User has full access to the fuzzer, or the specified job.
            # None means no filters => all jobs.
            return _get_filter_from_job(job)

        if not job:
            # Job not specified and user doesn't have full access to the fuzzer. Check
            # if the user has any allowed jobs and use that as a filter.
            allowed_jobs = external_users.allowed_jobs_for_user(
                helpers.get_user_email())
            if allowed_jobs:
                return allowed_jobs

        raise helpers.AccessDeniedException()
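
The helper _get_filter_from_job referenced above is not shown in this example; here is a minimal sketch, assuming it simply wraps a single job name into a one-element filter list, with None meaning no filter (all jobs):

def _get_filter_from_job(job):
    """Return a job filter for the stats query, or None to include all jobs."""
    return [job] if job else None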
Example #15
    def get(self):
        """Handle a GET request."""
        project = request.get('project')

        if access.has_access():
            # User is an internal user of ClusterFuzz (eg: ClusterFuzz developer).

            # Show all projects in the list, since this allows user to pick another
            # project as needed.
            projects_list = data_handler.get_all_project_names()

            # Filter fuzzers and job list if a project is provided.
            fuzzers_list = (
                data_handler.get_all_fuzzer_names_including_children(
                    include_parents=True, project=project))
            jobs_list = data_handler.get_all_job_type_names(project=project)
        else:
            # User is an external user of ClusterFuzz (eg: non-Chrome dev who
            # submitted a fuzzer or someone with a project in OSS-Fuzz).
            user_email = helpers.get_user_email()

            # TODO(aarya): Filter fuzzer and job if |project| is provided.
            fuzzers_list = sorted(
                external_users.allowed_fuzzers_for_user(user_email,
                                                        include_from_jobs=True,
                                                        include_parents=True))
            if not fuzzers_list:
                # User doesn't actually have access to any fuzzers.
                raise helpers.AccessDeniedException(
                    "You don't have access to any fuzzers.")

            jobs_list = sorted(
                external_users.allowed_jobs_for_user(user_email))
            projects_list = sorted(
                {data_handler.get_project_name(job)
                 for job in jobs_list})

        result = {
            'projects': projects_list,
            'fuzzers': fuzzers_list,
            'jobs': jobs_list,
        }
        return self.render_json(result)
Example #16
    def get(self):
        """Get the HTML page."""
        key = request.get('key')
        if not key:
            raise helpers.EarlyExitException('No key provided.', 400)

        testcase_id = request.get('testcase_id')
        if testcase_id:
            testcase = helpers.get_testcase(testcase_id)
            if not access.can_user_access_testcase(testcase):
                raise helpers.AccessDeniedException()

            if key not in [testcase.fuzzed_keys, testcase.minimized_keys]:
                raise helpers.AccessDeniedException()
        else:
            if not access.has_access():
                raise helpers.AccessDeniedException()

        blob_size = blobs.get_blob_size(key)
        if blob_size > MAX_ALLOWED_CONTENT_SIZE:
            raise helpers.EarlyExitException(
                'Content exceeds max allowed size.', 400)

        # TODO(mbarbella): Workaround for an issue in the Cloud Storage API. Remove
        # once it is fixed properly upstream:
        # https://github.com/googleapis/google-cloud-python/issues/6572
        if blob_size:
            try:
                content = blobs.read_key(key).decode('utf-8', errors='replace')
            except Exception:
                raise helpers.EarlyExitException('Failed to read content.',
                                                 400)
        else:
            content = u''

        line_count = len(content.splitlines())
        size = len(content)
        title = '%s, %s' % (utils.get_line_count_string(line_count),
                            utils.get_size_string(size))

        return self.render('viewer.html', {'content': content, 'title': title})
Example #17
def get_results():
    """Return results."""
    is_user = access.has_access()
    user_email = helpers.get_user_email()
    external_jobs = external_users.allowed_jobs_for_user(user_email)

    is_external_user = not is_user and external_jobs
    if not is_user and not is_external_user:
        raise helpers.AccessDeniedException()

    if is_user:
        projects = _get_all_project_results()
    else:
        projects = _get_project_results_for_external_user(external_jobs)

    results = {
        'info': {
            'projects': projects,
            'is_internal_user': is_user,
        },
    }
    return results
Example #18
 def test_redirected(self):
     """Test redirected."""
     self.mock.get_access.return_value = access.UserAccess.Redirected
     self.assertFalse(access.has_access(True, 'a', 'b'))
     self.mock.get_access.assert_has_calls([mock.call(True, 'a', 'b')])
Example #19
 def test_allowed(self):
     """Test allowed."""
     self.mock.get_access.return_value = access.UserAccess.Allowed
     self.assertTrue(access.has_access(True, 'a', 'b'))
     self.mock.get_access.assert_has_calls([mock.call(True, 'a', 'b')])
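
Read together, Example #18 and Example #19 suggest the shape of has_access itself. The following minimal sketch is consistent with both tests, assuming get_access and the UserAccess enum are module-level members of the same access module; the argument names and order are assumptions:

def has_access(need_privileged_access=False, job_type=None, fuzzer_name=None):
    """Hypothetical: allow the caller only when get_access() reports Allowed."""
    return get_access(need_privileged_access, job_type,
                      fuzzer_name) == UserAccess.Allowed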
Example #20
    def do_post(self):
        """Upload a testcase."""
        testcase_id = self.request.get('testcaseId')
        uploaded_file = self.get_upload()
        if testcase_id and not uploaded_file:
            testcase = helpers.get_testcase(testcase_id)
            if not access.can_user_access_testcase(testcase):
                raise helpers.AccessDeniedException()

            # Use minimized testcase for upload (if available).
            key = (testcase.minimized_keys if testcase.minimized_keys
                   and testcase.minimized_keys != 'NA' else
                   testcase.fuzzed_keys)

            uploaded_file = blobs.get_blob_info(key)

            # Extract filename part from blob.
            uploaded_file.filename = os.path.basename(
                uploaded_file.filename.replace('\\', os.sep))

        job_type = self.request.get('job')
        if not job_type:
            raise helpers.EarlyExitException('Missing job name.', 400)

        if (not data_types.Job.VALID_NAME_REGEX.match(job_type)
                or not data_types.Job.query(
                    data_types.Job.name == job_type).get()):
            raise helpers.EarlyExitException('Invalid job name.', 400)

        fuzzer_name = ''
        job_type_lowercase = job_type.lower()
        if 'libfuzzer' in job_type_lowercase:
            fuzzer_name = 'libFuzzer'
        elif 'afl' in job_type_lowercase:
            fuzzer_name = 'afl'

        target_name = self.request.get('target')
        if not fuzzer_name and target_name:
            raise helpers.EarlyExitException(
                'Target name is not applicable to non-engine jobs (AFL, libFuzzer).',
                400)

        if fuzzer_name and not target_name:
            raise helpers.EarlyExitException(
                'Missing target name for engine job (AFL, libFuzzer).', 400)

        if (target_name
                and not data_types.Fuzzer.VALID_NAME_REGEX.match(target_name)):
            raise helpers.EarlyExitException('Invalid target name.', 400)

        fully_qualified_fuzzer_name = ''
        if fuzzer_name and target_name:
            fully_qualified_fuzzer_name, target_name = find_fuzz_target(
                fuzzer_name, target_name, job_type)
            if not fully_qualified_fuzzer_name:
                raise helpers.EarlyExitException('Target does not exist.', 400)

        if not access.has_access(need_privileged_access=False,
                                 job_type=job_type,
                                 fuzzer_name=(fully_qualified_fuzzer_name
                                              or fuzzer_name)):
            raise helpers.AccessDeniedException()

        multiple_testcases = bool(self.request.get('multiple'))
        http_flag = bool(self.request.get('http'))
        high_end_job = bool(self.request.get('highEnd'))
        bug_information = self.request.get('issue')
        crash_revision = self.request.get('revision')
        timeout = self.request.get('timeout')
        retries = self.request.get('retries')
        bug_summary_update_flag = bool(self.request.get('updateIssue'))
        additional_arguments = self.request.get('args')
        app_launch_command = self.request.get('cmd')
        platform_id = self.request.get('platform')

        testcase_metadata = self.request.get('metadata')
        if testcase_metadata:
            try:
                testcase_metadata = json.loads(testcase_metadata)
                if not isinstance(testcase_metadata, dict):
                    raise helpers.EarlyExitException(
                        'Metadata is not a JSON object.', 400)
            except Exception:
                raise helpers.EarlyExitException('Invalid metadata JSON.', 400)

        archive_state = 0
        bundled = False
        file_path_input = ''
        email = helpers.get_user_email()

        # If we have an AFL or libFuzzer target, use that for arguments.
        # Launch command looks like
        # python launcher.py {testcase_path} {target_name}
        if target_name:
            additional_arguments = '%%TESTCASE%% %s' % target_name

        # Certain modifications such as app launch command, issue updates are only
        # allowed for privileged users.
        privileged_user = access.has_access(need_privileged_access=True)
        if not privileged_user:
            if bug_information or bug_summary_update_flag:
                raise helpers.EarlyExitException(
                    'You are not privileged to update existing issues.', 400)

            need_privileged_access = utils.string_is_true(
                data_handler.get_value_from_job_definition(
                    job_type, 'PRIVILEGED_ACCESS'))
            if need_privileged_access:
                raise helpers.EarlyExitException(
                    'You are not privileged to run this job type.', 400)

            if app_launch_command:
                raise helpers.EarlyExitException(
                    'You are not privileged to run arbitrary launch commands.',
                    400)

            if testcase_metadata:
                raise helpers.EarlyExitException(
                    'You are not privileged to set testcase metadata.', 400)

        if crash_revision and crash_revision.isdigit():
            crash_revision = int(crash_revision)
        else:
            crash_revision = 0

        if bug_information and not bug_information.isdigit():
            raise helpers.EarlyExitException('Bug is not a number.', 400)

        if not timeout:
            timeout = 0
        elif not timeout.isdigit() or timeout == '0':
            raise helpers.EarlyExitException(
                'Testcase timeout must be a number greater than 0.', 400)
        else:
            timeout = int(timeout)
            if timeout > 120:
                raise helpers.EarlyExitException(
                    'Testcase timeout may not be greater than 120 seconds.',
                    400)

        if retries:
            if retries.isdigit():
                retries = int(retries)
            else:
                retries = None

            if retries is None or retries > MAX_RETRIES:
                raise helpers.EarlyExitException(
                    'Testcase retries must be a number less than %d.' %
                    MAX_RETRIES, 400)
        else:
            retries = None

        try:
            gestures = ast.literal_eval(self.request.get('gestures'))
        except:
            gestures = []
        if not gestures:
            gestures = []

        job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

        if uploaded_file is not None:
            filename = ''.join([
                x for x in uploaded_file.filename
                if x not in ' ;/?:@&=+$,{}|<>()\\'
            ])
            key = str(uploaded_file.key())
            if archive.is_archive(filename):
                archive_state = data_types.ArchiveStatus.FUZZED
            if archive_state:
                if multiple_testcases:
                    if testcase_metadata:
                        raise helpers.EarlyExitException(
                            'Testcase metadata not supported with multiple testcases.',
                            400)
                    # Create a job to unpack an archive.
                    metadata = data_types.BundledArchiveMetadata()
                    metadata.blobstore_key = key
                    metadata.timeout = timeout
                    metadata.job_queue = job_queue
                    metadata.job_type = job_type
                    metadata.http_flag = http_flag
                    metadata.archive_filename = filename
                    metadata.uploader_email = email
                    metadata.gestures = gestures
                    metadata.crash_revision = crash_revision
                    metadata.additional_arguments = additional_arguments
                    metadata.bug_information = bug_information
                    metadata.platform_id = platform_id
                    metadata.app_launch_command = app_launch_command
                    metadata.fuzzer_name = fuzzer_name
                    metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
                    metadata.fuzzer_binary_name = target_name
                    metadata.put()

                    tasks.add_task('unpack',
                                   str(metadata.key.id()),
                                   job_type,
                                   queue=tasks.queue_for_job(job_type))

                    # Create a testcase metadata object to show the user their upload.
                    upload_metadata = data_types.TestcaseUploadMetadata()
                    upload_metadata.timestamp = datetime.datetime.utcnow()
                    upload_metadata.filename = filename
                    upload_metadata.blobstore_key = key
                    upload_metadata.original_blobstore_key = key
                    upload_metadata.status = 'Pending'
                    upload_metadata.bundled = True
                    upload_metadata.uploader_email = email
                    upload_metadata.retries = retries
                    upload_metadata.bug_summary_update_flag = bug_summary_update_flag
                    upload_metadata.put()

                    helpers.log('Uploaded multiple testcases.',
                                helpers.VIEW_OPERATION)
                    self.render_json({'multiple': True})
                    return

                file_path_input = guess_input_file(uploaded_file, filename)
                if not file_path_input:
                    raise helpers.EarlyExitException((
                        "Unable to detect which file to launch. The main file\'s name "
                        'must contain either of %s.' % str(RUN_FILE_PATTERNS)),
                                                     400)

        else:
            raise helpers.EarlyExitException('Please select a file to upload.',
                                             400)

        testcase_id = data_handler.create_user_uploaded_testcase(
            key,
            key,
            archive_state,
            filename,
            file_path_input,
            timeout,
            job_type,
            job_queue,
            http_flag,
            gestures,
            additional_arguments,
            bug_information,
            crash_revision,
            email,
            platform_id,
            app_launch_command,
            fuzzer_name,
            fully_qualified_fuzzer_name,
            target_name,
            bundled,
            retries,
            bug_summary_update_flag,
            additional_metadata=testcase_metadata)

        testcase = data_handler.get_testcase_by_id(testcase_id)
        issue = issue_tracker_utils.get_issue_for_testcase(testcase)
        if issue:
            report_url = data_handler.TESTCASE_REPORT_URL.format(
                domain=data_handler.get_domain(), testcase_id=testcase_id)
            comment = ('ClusterFuzz is analyzing your testcase. '
                       'Developers can follow the progress at %s.' %
                       report_url)
            issue.save(new_comment=comment)

        helpers.log('Uploaded testcase %s' % testcase_id,
                    helpers.VIEW_OPERATION)
        self.render_json({'id': '%s' % testcase_id})
Example #21
    def do_post(self):
        """Upload a testcase."""
        email = helpers.get_user_email()
        testcase_id = request.get('testcaseId')
        uploaded_file = self.get_upload()
        if testcase_id and not uploaded_file:
            testcase = helpers.get_testcase(testcase_id)
            if not access.can_user_access_testcase(testcase):
                raise helpers.AccessDeniedException()

            # Use minimized testcase for upload (if available).
            key = (testcase.minimized_keys if testcase.minimized_keys
                   and testcase.minimized_keys != 'NA' else
                   testcase.fuzzed_keys)

            uploaded_file = blobs.get_blob_info(key)

            # Extract filename part from blob.
            uploaded_file.filename = os.path.basename(
                uploaded_file.filename.replace('\\', os.sep))

        job_type = request.get('job')
        if not job_type:
            raise helpers.EarlyExitException('Missing job name.', 400)

        job = data_types.Job.query(data_types.Job.name == job_type).get()
        if not job:
            raise helpers.EarlyExitException('Invalid job name.', 400)

        fuzzer_name = request.get('fuzzer')
        job_type_lowercase = job_type.lower()
        if 'libfuzzer' in job_type_lowercase:
            fuzzer_name = 'libFuzzer'
        elif 'afl' in job_type_lowercase:
            fuzzer_name = 'afl'
        elif 'honggfuzz' in job_type_lowercase:
            fuzzer_name = 'honggfuzz'

        is_engine_job = fuzzer_name and environment.is_engine_fuzzer_job(
            job_type)
        target_name = request.get('target')
        if not is_engine_job and target_name:
            raise helpers.EarlyExitException(
                'Target name is not applicable to non-engine jobs (AFL, libFuzzer).',
                400)

        if is_engine_job and not target_name:
            raise helpers.EarlyExitException(
                'Missing target name for engine job (AFL, libFuzzer).', 400)

        if (target_name
                and not data_types.Fuzzer.VALID_NAME_REGEX.match(target_name)):
            raise helpers.EarlyExitException('Invalid target name.', 400)

        fully_qualified_fuzzer_name = ''
        if is_engine_job and target_name:
            if job.is_external():
                # External jobs don't run and set FuzzTarget entities as part of
                # fuzz_task. Set it here instead.
                fuzz_target = (data_handler.record_fuzz_target(
                    fuzzer_name, target_name, job_type))
                fully_qualified_fuzzer_name = (
                    fuzz_target.fully_qualified_name())
                target_name = fuzz_target.binary
            else:
                fully_qualified_fuzzer_name, target_name = find_fuzz_target(
                    fuzzer_name, target_name, job_type)

        if (not access.has_access(need_privileged_access=False,
                                  job_type=job_type,
                                  fuzzer_name=(fully_qualified_fuzzer_name
                                               or fuzzer_name))
                and not _is_uploader_allowed(email)):
            raise helpers.AccessDeniedException()

        multiple_testcases = bool(request.get('multiple'))
        http_flag = bool(request.get('http'))
        high_end_job = bool(request.get('highEnd'))
        bug_information = request.get('issue')
        crash_revision = request.get('revision')
        timeout = request.get('timeout')
        retries = request.get('retries')
        bug_summary_update_flag = bool(request.get('updateIssue'))
        quiet_flag = bool(request.get('quiet'))
        additional_arguments = request.get('args')
        app_launch_command = request.get('cmd')
        platform_id = request.get('platform')
        issue_labels = request.get('issue_labels')
        gestures = request.get('gestures') or '[]'
        stacktrace = request.get('stacktrace')

        crash_data = None
        if job.is_external():
            if not stacktrace:
                raise helpers.EarlyExitException(
                    'Stacktrace required for external jobs.', 400)

            if not crash_revision:
                raise helpers.EarlyExitException(
                    'Revision required for external jobs.', 400)

            crash_data = stack_analyzer.get_crash_data(
                stacktrace,
                fuzz_target=target_name,
                symbolize_flag=False,
                already_symbolized=True,
                detect_ooms_and_hangs=True)
        elif stacktrace:
            raise helpers.EarlyExitException(
                'Should not specify stacktrace for non-external jobs.', 400)

        testcase_metadata = request.get('metadata', {})
        if testcase_metadata:
            try:
                testcase_metadata = json.loads(testcase_metadata)
            except Exception as e:
                raise helpers.EarlyExitException('Invalid metadata JSON.',
                                                 400) from e
            if not isinstance(testcase_metadata, dict):
                raise helpers.EarlyExitException(
                    'Metadata is not a JSON object.', 400)
        if issue_labels:
            testcase_metadata['issue_labels'] = issue_labels

        try:
            gestures = ast.literal_eval(gestures)
        except Exception as e:
            raise helpers.EarlyExitException('Failed to parse gestures.',
                                             400) from e

        archive_state = 0
        bundled = False
        file_path_input = ''

        # Certain modifications such as app launch command, issue updates are only
        # allowed for privileged users.
        privileged_user = access.has_access(need_privileged_access=True)
        if not privileged_user:
            if bug_information or bug_summary_update_flag:
                raise helpers.EarlyExitException(
                    'You are not privileged to update existing issues.', 400)

            need_privileged_access = utils.string_is_true(
                data_handler.get_value_from_job_definition(
                    job_type, 'PRIVILEGED_ACCESS'))
            if need_privileged_access:
                raise helpers.EarlyExitException(
                    'You are not privileged to run this job type.', 400)

            if app_launch_command:
                raise helpers.EarlyExitException(
                    'You are not privileged to run arbitrary launch commands.',
                    400)

            if (testcase_metadata
                    and not _allow_unprivileged_metadata(testcase_metadata)):
                raise helpers.EarlyExitException(
                    'You are not privileged to set testcase metadata.', 400)

            if additional_arguments:
                raise helpers.EarlyExitException(
                    'You are not privileged to add command-line arguments.',
                    400)

            if gestures:
                raise helpers.EarlyExitException(
                    'You are not privileged to run arbitrary gestures.', 400)

        # TODO(aarya): Remove once AFL is migrated to engine pipeline.
        if target_name:
            additional_arguments = '%TESTCASE%'

        if crash_revision and crash_revision.isdigit():
            crash_revision = int(crash_revision)
        else:
            crash_revision = 0

        if bug_information == '0':  # Auto-recover from this bad input.
            bug_information = None
        if bug_information and not bug_information.isdigit():
            raise helpers.EarlyExitException('Bug is not a number.', 400)

        if not timeout:
            timeout = 0
        elif not timeout.isdigit() or timeout == '0':
            raise helpers.EarlyExitException(
                'Testcase timeout must be a number greater than 0.', 400)
        else:
            timeout = int(timeout)
            if timeout > 120:
                raise helpers.EarlyExitException(
                    'Testcase timeout may not be greater than 120 seconds.',
                    400)

        if retries:
            if retries.isdigit():
                retries = int(retries)
            else:
                retries = None

            if retries is None or retries > MAX_RETRIES:
                raise helpers.EarlyExitException(
                    'Testcase retries must be a number less than %d.' %
                    MAX_RETRIES, 400)
        else:
            retries = None

        job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

        if uploaded_file is not None:
            filename = ''.join([
                x for x in uploaded_file.filename
                if x not in ' ;/?:@&=+$,{}|<>()\\'
            ])
            key = str(uploaded_file.key())
            if archive.is_archive(filename):
                archive_state = data_types.ArchiveStatus.FUZZED
            if archive_state:
                if multiple_testcases:
                    # Create a job to unpack an archive.
                    metadata = data_types.BundledArchiveMetadata()
                    metadata.blobstore_key = key
                    metadata.timeout = timeout
                    metadata.job_queue = job_queue
                    metadata.job_type = job_type
                    metadata.http_flag = http_flag
                    metadata.archive_filename = filename
                    metadata.uploader_email = email
                    metadata.gestures = gestures
                    metadata.crash_revision = crash_revision
                    metadata.additional_arguments = additional_arguments
                    metadata.bug_information = bug_information
                    metadata.platform_id = platform_id
                    metadata.app_launch_command = app_launch_command
                    metadata.fuzzer_name = fuzzer_name
                    metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
                    metadata.fuzzer_binary_name = target_name
                    metadata.put()

                    tasks.add_task('unpack',
                                   str(metadata.key.id()),
                                   job_type,
                                   queue=tasks.queue_for_job(job_type))

                    # Create a testcase metadata object to show the user their upload.
                    upload_metadata = data_types.TestcaseUploadMetadata()
                    upload_metadata.timestamp = datetime.datetime.utcnow()
                    upload_metadata.filename = filename
                    upload_metadata.blobstore_key = key
                    upload_metadata.original_blobstore_key = key
                    upload_metadata.status = 'Pending'
                    upload_metadata.bundled = True
                    upload_metadata.uploader_email = email
                    upload_metadata.retries = retries
                    upload_metadata.bug_summary_update_flag = bug_summary_update_flag
                    upload_metadata.quiet_flag = quiet_flag
                    upload_metadata.additional_metadata_string = json.dumps(
                        testcase_metadata)
                    upload_metadata.bug_information = bug_information
                    upload_metadata.put()

                    helpers.log('Uploaded multiple testcases.',
                                helpers.VIEW_OPERATION)
                    return self.render_json({'multiple': True})

                file_path_input = guess_input_file(uploaded_file, filename)
                if not file_path_input:
                    raise helpers.EarlyExitException((
                        "Unable to detect which file to launch. The main file\'s name "
                        'must contain either of %s.' % str(RUN_FILE_PATTERNS)),
                                                     400)

        else:
            raise helpers.EarlyExitException('Please select a file to upload.',
                                             400)

        testcase_id = data_handler.create_user_uploaded_testcase(
            key,
            key,
            archive_state,
            filename,
            file_path_input,
            timeout,
            job,
            job_queue,
            http_flag,
            gestures,
            additional_arguments,
            bug_information,
            crash_revision,
            email,
            platform_id,
            app_launch_command,
            fuzzer_name,
            fully_qualified_fuzzer_name,
            target_name,
            bundled,
            retries,
            bug_summary_update_flag,
            quiet_flag,
            additional_metadata=testcase_metadata,
            crash_data=crash_data)

        if not quiet_flag:
            testcase = data_handler.get_testcase_by_id(testcase_id)
            issue = issue_tracker_utils.get_issue_for_testcase(testcase)
            if issue:
                report_url = data_handler.TESTCASE_REPORT_URL.format(
                    domain=data_handler.get_domain(), testcase_id=testcase_id)

                comment = ('ClusterFuzz is analyzing your testcase. '
                           'Developers can follow the progress at %s.' %
                           report_url)
                issue.save(new_comment=comment)

        helpers.log('Uploaded testcase %s' % testcase_id,
                    helpers.VIEW_OPERATION)
        return self.render_json({'id': '%s' % testcase_id})
Example #22
def get_testcase_detail(testcase):
  """Get testcase detail for rendering the testcase detail page."""
  config = db_config.get()
  crash_address = testcase.crash_address
  crash_state = testcase.crash_state
  crash_state_lines = crash_state.strip().splitlines()
  crash_type = data_handler.get_crash_type_string(testcase)
  reproduction_help_url = data_handler.get_reproduction_help_url(
      testcase, config)
  external_user = not access.has_access(job_type=testcase.job_type)
  issue_url = issue_tracker_utils.get_issue_url(testcase)
  metadata = testcase.get_metadata()
  original_testcase_size = _get_blob_size_string(testcase.fuzzed_keys)
  minimized_testcase_size = _get_blob_size_string(testcase.minimized_keys)
  has_issue_tracker = bool(data_handler.get_issue_tracker_name())

  if not testcase.regression:
    regression = 'Pending'
  elif testcase.regression == 'NA':
    regression = 'NA'
  else:
    regression = _get_revision_range_html_from_string(testcase.job_type,
                                                      testcase.regression)

  fixed_full = None
  if 'progression_pending' in metadata:
    fixed = 'Pending'
  elif not testcase.fixed:
    fixed = 'NO'
  elif testcase.fixed == 'NA':
    fixed = 'NA'
  elif testcase.fixed == 'Yes':
    fixed = 'YES'
  else:
    fixed = 'YES'
    fixed_full = _get_revision_range_html_from_string(testcase.job_type,
                                                      testcase.fixed)

  last_tested = None
  last_tested_revision = (
      metadata.get('last_tested_revision') or testcase.crash_revision)
  if last_tested_revision:
    last_tested = _get_revision_range_html(testcase.job_type,
                                           last_tested_revision)

  crash_revision = testcase.crash_revision
  crash_revisions_dict = revisions.get_component_revisions_dict(
      crash_revision, testcase.job_type)
  crash_stacktrace = data_handler.get_stacktrace(testcase)
  crash_stacktrace = filter_stacktrace(crash_stacktrace, testcase.crash_type,
                                       crash_revisions_dict)
  crash_stacktrace = convert_to_lines(crash_stacktrace, crash_state_lines,
                                      crash_type)
  crash_stacktrace_preview_lines = _preview_stacktrace(crash_stacktrace)

  second_crash_stacktrace_revision = metadata.get(
      'second_crash_stacktrace_revision')
  second_crash_stacktrace_revisions_dict = (
      revisions.get_component_revisions_dict(second_crash_stacktrace_revision,
                                             testcase.job_type))
  second_crash_stacktrace = data_handler.get_stacktrace(
      testcase, stack_attribute='second_crash_stacktrace')
  second_crash_stacktrace = filter_stacktrace(
      second_crash_stacktrace, testcase.crash_type,
      second_crash_stacktrace_revisions_dict)
  second_crash_stacktrace = convert_to_lines(second_crash_stacktrace,
                                             crash_state_lines, crash_type)
  second_crash_stacktrace_preview_lines = _preview_stacktrace(
      second_crash_stacktrace)

  last_tested_crash_revision = metadata.get('last_tested_crash_revision')
  last_tested_crash_revisions_dict = revisions.get_component_revisions_dict(
      last_tested_crash_revision, testcase.job_type)
  last_tested_crash_stacktrace = data_handler.get_stacktrace(
      testcase, stack_attribute='last_tested_crash_stacktrace')
  last_tested_crash_stacktrace = filter_stacktrace(
      last_tested_crash_stacktrace, testcase.crash_type,
      last_tested_crash_revisions_dict)
  last_tested_crash_stacktrace = convert_to_lines(last_tested_crash_stacktrace,
                                                  crash_state_lines, crash_type)
  last_tested_crash_stacktrace_preview_lines = _preview_stacktrace(
      last_tested_crash_stacktrace)

  privileged_user = access.has_access(need_privileged_access=True)

  # Fix build url link. |storage.cloud.google.com| takes care of using the
  # right set of authentication credentials needed to access the link.
  if 'build_url' in metadata:
    metadata['build_url'] = metadata['build_url'].replace(
        'gs://', 'https://storage.cloud.google.com/')

  pending_blame_task = (
      testcase.has_blame() and 'blame_pending' in metadata and
      metadata['blame_pending'])
  pending_impact_task = (
      testcase.has_impacts() and not testcase.is_impact_set_flag)
  pending_minimize_task = not testcase.minimized_keys
  pending_progression_task = ('progression_pending' in metadata and
                              metadata['progression_pending'])
  pending_regression_task = not testcase.regression
  pending_stack_task = testcase.last_tested_crash_stacktrace == 'Pending'
  needs_refresh = (
      testcase.status == 'Pending' or
      ((testcase.status == 'Processed' or testcase.status == 'Duplicate') and
       (pending_blame_task or pending_impact_task or pending_minimize_task or
        pending_progression_task or pending_regression_task or
        pending_stack_task)))

  if data_types.SecuritySeverity.is_valid(testcase.security_severity):
    security_severity = severity_analyzer.severity_to_string(
        testcase.security_severity)
  else:
    security_severity = None

  auto_delete_timestamp = None
  auto_close_timestamp = None

  if testcase.one_time_crasher_flag:
    last_crash_time = (
        crash_stats.get_last_crash_time(testcase) or testcase.timestamp)

    # Set auto-delete timestamp for unreproducible testcases with
    # no associated bug.
    if not testcase.bug_information:
      auto_delete_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_NO_BUG_DEADLINE))

    # Set auto-close timestamp for unreproducible testcases with
    # an associated bug.
    if testcase.open and testcase.bug_information:
      auto_close_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE))

  memory_tool_display_string = environment.get_memory_tool_display_string(
      testcase.job_type)
  memory_tool_display_label = memory_tool_display_string.split(':')[0]
  memory_tool_display_value = memory_tool_display_string.split(':')[1].strip()

  helpers.log('Testcase %s' % testcase.key.id(), helpers.VIEW_OPERATION)
  return {
      'id':
          testcase.key.id(),
      'crash_type':
          crash_type,
      'crash_address':
          crash_address,
      'crash_state':
          crash_state,  # Used by reproduce tool.
      'crash_state_lines':
          crash_state_lines,
      'crash_revision':
          testcase.crash_revision,
      'csrf_token':
          form.generate_csrf_token(),
      'external_user':
          external_user,
      'footer':
          testcase.comments,
      'fixed':
          fixed,
      'fixed_full':
          fixed_full,
      'issue_url':
          issue_url,
      'is_admin':
          auth.is_current_user_admin(),
      'metadata':
          metadata,
      'minimized_testcase_size':
          minimized_testcase_size,
      'needs_refresh':
          needs_refresh,
      'original_testcase_size':
          original_testcase_size,
      'privileged_user':
          privileged_user,
      'regression':
          regression,
      'crash_stacktrace': {
          'lines':
              crash_stacktrace,
          'preview_lines':
              crash_stacktrace_preview_lines,
          'revision':
              revisions.get_real_revision(
                  crash_revision, testcase.job_type, display=True)
      },
      'second_crash_stacktrace': {
          'lines':
              second_crash_stacktrace,
          'preview_lines':
              second_crash_stacktrace_preview_lines,
          'revision':
              revisions.get_real_revision(
                  second_crash_stacktrace_revision,
                  testcase.job_type,
                  display=True)
      },
      'last_tested_crash_stacktrace': {
          'lines':
              last_tested_crash_stacktrace,
          'preview_lines':
              last_tested_crash_stacktrace_preview_lines,
          'revision':
              revisions.get_real_revision(
                  last_tested_crash_revision, testcase.job_type, display=True)
      },
      'security_severity':
          security_severity,
      'security_severities':
          data_types.SecuritySeverity.list(),
      'stats': {
          'min_hour': crash_stats.get_min_hour(),
          'max_hour': crash_stats.get_max_hour(),
      },
      'suspected_cls':
          _parse_suspected_cls(metadata.get('predator_result')),
      'testcase':
          testcase,
      'timestamp':
          utils.utc_datetime_to_timestamp(testcase.timestamp),
      'show_blame':
          testcase.has_blame(),
      'show_impact':
          testcase.has_impacts(),
      'impacts_production':
          testcase.impacts_production(),
      'find_similar_issues_options':
          FIND_SIMILAR_ISSUES_OPTIONS,
      'auto_delete_timestamp':
          auto_delete_timestamp,
      'auto_close_timestamp':
          auto_close_timestamp,
      'memory_tool_display_label':
          memory_tool_display_label,
      'memory_tool_display_value':
          memory_tool_display_value,
      'last_tested':
          last_tested,
      'is_admin_or_not_oss_fuzz':
          is_admin_or_not_oss_fuzz(),
      'has_issue_tracker':
          has_issue_tracker,
      'reproduction_help_url':
          reproduction_help_url,
      'is_local_development':
          environment.is_running_on_app_engine_development(),
  }
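
The return value above passes suspected_cls through _parse_suspected_cls, a helper that is not part of this excerpt. The sketch below is a minimal illustration of what such a helper could look like, assuming the Predator result is a dict with a "result" sub-dict; the field names are assumptions for illustration, not the actual schema.

def _parse_suspected_cls(predator_result):
    """Sketch only: pull the fields the detail page needs from a Predator result.

    Assumes predator_result is a dict with a 'result' sub-dict; the field
    names below are illustrative assumptions.
    """
    if not predator_result:
        return None

    result = predator_result.get('result', {})
    return {
        'found': result.get('found'),
        'suspected_project': result.get('suspected_project'),
        'suspected_components': result.get('suspected_components'),
        'changelists': result.get('suspected_cls'),
        'feedback_url': result.get('feedback_url'),
        'error_message': result.get('error_message'),
    }
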
Example #23
def get_testcase_detail(testcase):
    """Get testcase detail for rendering the testcase detail page."""
    config = db_config.get()
    crash_address = testcase.crash_address
    crash_state = testcase.crash_state
    crash_state_lines = crash_state.strip().splitlines()
    crash_type = data_handler.get_crash_type_string(testcase)
    external_user = not access.has_access(job_type=testcase.job_type)
    issue_url = issue_tracker_utils.get_issue_url(testcase)
    metadata = testcase.get_metadata()
    original_testcase_size = _get_blob_size_string(testcase.fuzzed_keys)
    minimized_testcase_size = _get_blob_size_string(testcase.minimized_keys)
    has_issue_tracker = bool(data_handler.get_issue_tracker_name())

    fuzzer_display = data_handler.get_fuzzer_display(testcase)

    formatted_reproduction_help = _format_reproduction_help(
        data_handler.get_formatted_reproduction_help(testcase))
    # When we have a HELP_TEMPLATE, ignore any default values set for HELP_URL.
    if not formatted_reproduction_help:
        reproduction_help_url = data_handler.get_reproduction_help_url(
            testcase, config)
    else:
        reproduction_help_url = None

    if not testcase.regression:
        regression = "Pending"
    elif testcase.regression == "NA":
        regression = "NA"
    else:
        regression = _get_revision_range_html_from_string(
            testcase.job_type, testcase.regression)

    fixed_full = None
    if "progression_pending" in metadata:
        fixed = "Pending"
    elif not testcase.fixed:
        fixed = "NO"
    elif testcase.fixed == "NA":
        fixed = "NA"
    elif testcase.fixed == "Yes":
        fixed = "YES"
    else:
        fixed = "YES"
        fixed_full = _get_revision_range_html_from_string(
            testcase.job_type, testcase.fixed)

    last_tested = None
    last_tested_revision = (metadata.get("last_tested_revision")
                            or testcase.crash_revision)
    if last_tested_revision:
        last_tested = _get_revision_range_html(testcase.job_type,
                                               last_tested_revision)

    crash_revision = testcase.crash_revision
    crash_revisions_dict = revisions.get_component_revisions_dict(
        crash_revision, testcase.job_type)
    crash_stacktrace = data_handler.get_stacktrace(testcase)
    crash_stacktrace = filter_stacktrace(crash_stacktrace, testcase.crash_type,
                                         crash_revisions_dict)
    crash_stacktrace = convert_to_lines(crash_stacktrace, crash_state_lines,
                                        crash_type)
    crash_stacktrace_preview_lines = _preview_stacktrace(crash_stacktrace)

    last_tested_crash_revision = metadata.get("last_tested_crash_revision")
    last_tested_crash_revisions_dict = revisions.get_component_revisions_dict(
        last_tested_crash_revision, testcase.job_type)
    last_tested_crash_stacktrace = data_handler.get_stacktrace(
        testcase, stack_attribute="last_tested_crash_stacktrace")
    last_tested_crash_stacktrace = filter_stacktrace(
        last_tested_crash_stacktrace,
        testcase.crash_type,
        last_tested_crash_revisions_dict,
    )
    last_tested_crash_stacktrace = convert_to_lines(
        last_tested_crash_stacktrace, crash_state_lines, crash_type)
    last_tested_crash_stacktrace_preview_lines = _preview_stacktrace(
        last_tested_crash_stacktrace)

    privileged_user = access.has_access(need_privileged_access=True)

    # Fix build url link. |storage.cloud.google.com| takes care of using the
    # right set of authentication credentials needed to access the link.
    if "build_url" in metadata:
        metadata["build_url"] = metadata["build_url"].replace(
            "gs://", "https://storage.cloud.google.com/")

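    # Work out which follow-up tasks are still outstanding so the UI knows
    # whether the page needs to keep refreshing.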
    pending_blame_task = (testcase.has_blame() and "blame_pending" in metadata
                          and metadata["blame_pending"])
    pending_impact_task = (testcase.has_impacts()
                           and not testcase.is_impact_set_flag)
    pending_minimize_task = not testcase.minimized_keys
    pending_progression_task = ("progression_pending" in metadata
                                and metadata["progression_pending"])
    pending_regression_task = not testcase.regression
    pending_stack_task = testcase.last_tested_crash_stacktrace == "Pending"
    needs_refresh = testcase.status == "Pending" or (
        (testcase.status == "Processed" or testcase.status == "Duplicate") and
        (pending_blame_task or pending_impact_task or pending_minimize_task
         or pending_progression_task or pending_regression_task
         or pending_stack_task))

    if data_types.SecuritySeverity.is_valid(testcase.security_severity):
        security_severity = severity_analyzer.severity_to_string(
            testcase.security_severity)
    else:
        security_severity = None

    auto_delete_timestamp = None
    auto_close_timestamp = None

    if testcase.one_time_crasher_flag:
        last_crash_time = (crash_stats.get_last_crash_time(testcase)
                           or testcase.timestamp)

        # Set auto-delete timestamp for unreproducible testcases with
        # no associated bug.
        if not testcase.bug_information:
            auto_delete_timestamp = utils.utc_datetime_to_timestamp(
                last_crash_time + datetime.timedelta(
                    days=data_types.UNREPRODUCIBLE_TESTCASE_NO_BUG_DEADLINE))

        # Set auto-close timestamp for unreproducible testcases with
        # an associated bug.
        if testcase.open and testcase.bug_information:
            auto_close_timestamp = utils.utc_datetime_to_timestamp(
                last_crash_time + datetime.timedelta(
                    days=data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE))

    memory_tool_display_string = environment.get_memory_tool_display_string(
        testcase.job_type)
    memory_tool_display_label = memory_tool_display_string.split(":")[0]
    memory_tool_display_value = memory_tool_display_string.split(
        ":")[1].strip()

    helpers.log("Testcase %s" % testcase.key.id(), helpers.VIEW_OPERATION)
    return {
        "id": testcase.key.id(),
        "crash_type": crash_type,
        "crash_address": crash_address,
        "crash_state": crash_state,  # Used by reproduce tool.
        "crash_state_lines": crash_state_lines,
        "crash_revision": testcase.crash_revision,
        "csrf_token": form.generate_csrf_token(),
        "external_user": external_user,
        "footer": testcase.comments,
        "formatted_reproduction_help": formatted_reproduction_help,
        "fixed": fixed,
        "fixed_full": fixed_full,
        "issue_url": issue_url,
        "is_admin": auth.is_current_user_admin(),
        "metadata": metadata,
        "minimized_testcase_size": minimized_testcase_size,
        "needs_refresh": needs_refresh,
        "original_testcase_size": original_testcase_size,
        "privileged_user": privileged_user,
        "regression": regression,
        "crash_stacktrace": {
            "lines":
            crash_stacktrace,
            "preview_lines":
            crash_stacktrace_preview_lines,
            "revision":
            revisions.get_real_revision(crash_revision,
                                        testcase.job_type,
                                        display=True),
        },
        "last_tested_crash_stacktrace": {
            "lines":
            last_tested_crash_stacktrace,
            "preview_lines":
            last_tested_crash_stacktrace_preview_lines,
            "revision":
            revisions.get_real_revision(last_tested_crash_revision,
                                        testcase.job_type,
                                        display=True),
        },
        "security_severity": security_severity,
        "security_severities": data_types.SecuritySeverity.list(),
        "stats": {
            "min_hour": crash_stats.get_min_hour(),
            "max_hour": crash_stats.get_max_hour(),
        },
        "suspected_cls": _parse_suspected_cls(metadata.get("predator_result")),
        "testcase": testcase,
        "timestamp": utils.utc_datetime_to_timestamp(testcase.timestamp),
        "show_blame": testcase.has_blame(),
        "show_impact": testcase.has_impacts(),
        "impacts_production": testcase.impacts_production(),
        "find_similar_issues_options": FIND_SIMILAR_ISSUES_OPTIONS,
        "auto_delete_timestamp": auto_delete_timestamp,
        "auto_close_timestamp": auto_close_timestamp,
        "memory_tool_display_label": memory_tool_display_label,
        "memory_tool_display_value": memory_tool_display_value,
        "last_tested": last_tested,
        "is_admin_or_not_oss_fuzz": is_admin_or_not_oss_fuzz(),
        "has_issue_tracker": has_issue_tracker,
        "reproduction_help_url": reproduction_help_url,
        "is_local_development":
        environment.is_running_on_app_engine_development(),
        "fuzzer_display": vars(fuzzer_display),
    }
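
The detail code above formats original_testcase_size and minimized_testcase_size with _get_blob_size_string, which is not shown in this excerpt. A rough sketch under stated assumptions: blobs.get_blob_size(blob_key) (used elsewhere in these handlers) returns a size in bytes or None, and the human-readable formatting loop is illustrative rather than the actual helper.

def _get_blob_size_string(blob_key):
    """Sketch only: human-readable size of a blob, or None if unavailable."""
    if not blob_key:
        return None

    # Assumption: blobs.get_blob_size returns the size in bytes, or None.
    size = blobs.get_blob_size(blob_key)
    if size is None:
        return None

    # Illustrative formatting; the real helper may format sizes differently.
    for unit in ('B', 'KB', 'MB', 'GB', 'TB'):
        if size < 1024 or unit == 'TB':
            return '%d %s' % (size, unit)
        size //= 1024
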
Example #24
    def do_post(self):
        """Upload a testcase."""
        email = helpers.get_user_email()
        testcase_id = self.request.get("testcaseId")
        uploaded_file = self.get_upload()
        if testcase_id and not uploaded_file:
            testcase = helpers.get_testcase(testcase_id)
            if not access.can_user_access_testcase(testcase):
                raise helpers.AccessDeniedException()

            # Use minimized testcase for upload (if available).
            key = (testcase.minimized_keys if testcase.minimized_keys
                   and testcase.minimized_keys != "NA" else
                   testcase.fuzzed_keys)

            uploaded_file = blobs.get_blob_info(key)

            # Extract filename part from blob.
            uploaded_file.filename = os.path.basename(
                uploaded_file.filename.replace("\\", os.sep))

        job_type = self.request.get("job")
        if not job_type:
            raise helpers.EarlyExitException("Missing job name.", 400)

        if (not data_types.Job.VALID_NAME_REGEX.match(job_type)
                or not data_types.Job.query(
                    data_types.Job.name == job_type).get()):
            raise helpers.EarlyExitException("Invalid job name.", 400)

        fuzzer_name = ""
        job_type_lowercase = job_type.lower()
        if "libfuzzer" in job_type_lowercase:
            fuzzer_name = "libFuzzer"
        elif "afl" in job_type_lowercase:
            fuzzer_name = "afl"

        target_name = self.request.get("target")
        if not fuzzer_name and target_name:
            raise helpers.EarlyExitException(
                "Target name is not applicable to non-engine jobs (AFL, libFuzzer).",
                400,
            )

        if fuzzer_name and not target_name:
            raise helpers.EarlyExitException(
                "Missing target name for engine job (AFL, libFuzzer).", 400)

        if target_name and not data_types.Fuzzer.VALID_NAME_REGEX.match(
                target_name):
            raise helpers.EarlyExitException("Invalid target name.", 400)

        fully_qualified_fuzzer_name = ""
        if fuzzer_name and target_name:
            fully_qualified_fuzzer_name, target_name = find_fuzz_target(
                fuzzer_name, target_name, job_type)
            if not fully_qualified_fuzzer_name:
                raise helpers.EarlyExitException("Target does not exist.", 400)

        if not access.has_access(
                need_privileged_access=False,
                job_type=job_type,
                fuzzer_name=(fully_qualified_fuzzer_name or fuzzer_name),
        ) and not _is_uploader_allowed(email):
            raise helpers.AccessDeniedException()

        multiple_testcases = bool(self.request.get("multiple"))
        http_flag = bool(self.request.get("http"))
        high_end_job = bool(self.request.get("highEnd"))
        bug_information = self.request.get("issue")
        crash_revision = self.request.get("revision")
        timeout = self.request.get("timeout")
        retries = self.request.get("retries")
        bug_summary_update_flag = bool(self.request.get("updateIssue"))
        quiet_flag = bool(self.request.get("quiet"))
        additional_arguments = self.request.get("args")
        app_launch_command = self.request.get("cmd")
        platform_id = self.request.get("platform")
        issue_labels = self.request.get("issue_labels")
        gestures = self.request.get("gestures") or "[]"

        testcase_metadata = self.request.get("metadata", {})
        if testcase_metadata:
            try:
                testcase_metadata = json.loads(testcase_metadata)
            except Exception:
                raise helpers.EarlyExitException("Invalid metadata JSON.", 400)
            if not isinstance(testcase_metadata, dict):
                raise helpers.EarlyExitException(
                    "Metadata is not a JSON object.", 400)
        if issue_labels:
            testcase_metadata["issue_labels"] = issue_labels

        try:
            gestures = ast.literal_eval(gestures)
        except Exception:
            raise helpers.EarlyExitException("Failed to parse gestures.", 400)

        archive_state = 0
        bundled = False
        file_path_input = ""

        # Certain modifications such as app launch command, issue updates are only
        # allowed for privileged users.
        privileged_user = access.has_access(need_privileged_access=True)
        if not privileged_user:
            if bug_information or bug_summary_update_flag:
                raise helpers.EarlyExitException(
                    "You are not privileged to update existing issues.", 400)

            need_privileged_access = utils.string_is_true(
                data_handler.get_value_from_job_definition(
                    job_type, "PRIVILEGED_ACCESS"))
            if need_privileged_access:
                raise helpers.EarlyExitException(
                    "You are not privileged to run this job type.", 400)

            if app_launch_command:
                raise helpers.EarlyExitException(
                    "You are not privileged to run arbitrary launch commands.",
                    400)

            if testcase_metadata and not _allow_unprivileged_metadata(
                    testcase_metadata):
                raise helpers.EarlyExitException(
                    "You are not privileged to set testcase metadata.", 400)

            if additional_arguments:
                raise helpers.EarlyExitException(
                    "You are not privileged to add command-line arguments.",
                    400)

            if gestures:
                raise helpers.EarlyExitException(
                    "You are not privileged to run arbitrary gestures.", 400)

        # TODO(aarya): Remove once AFL is migrated to engine pipeline.
        if target_name:
            additional_arguments = "%TESTCASE%"

        if crash_revision and crash_revision.isdigit():
            crash_revision = int(crash_revision)
        else:
            crash_revision = 0

        if bug_information == "0":  # Auto-recover from this bad input.
            bug_information = None
        if bug_information and not bug_information.isdigit():
            raise helpers.EarlyExitException("Bug is not a number.", 400)

        if not timeout:
            timeout = 0
        elif not timeout.isdigit() or timeout == "0":
            raise helpers.EarlyExitException(
                "Testcase timeout must be a number greater than 0.", 400)
        else:
            timeout = int(timeout)
            if timeout > 120:
                raise helpers.EarlyExitException(
                    "Testcase timeout may not be greater than 120 seconds.",
                    400)

        if retries:
            if retries.isdigit():
                retries = int(retries)
            else:
                retries = None

            if retries is None or retries > MAX_RETRIES:
                raise helpers.EarlyExitException(
                    "Testcase retries must be a number less than %d." %
                    MAX_RETRIES, 400)
        else:
            retries = None

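        # All form inputs are validated and normalized at this point. An
        # uploaded archive with "multiple" set fans out to an "unpack" task and
        # returns early; otherwise a single user-uploaded testcase is created.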
        job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

        if uploaded_file is not None:
            filename = "".join(x for x in uploaded_file.filename
                               if x not in " ;/?:@&=+$,{}|<>()\\")
            key = str(uploaded_file.key())
            if archive.is_archive(filename):
                archive_state = data_types.ArchiveStatus.FUZZED
            if archive_state:
                if multiple_testcases:
                    # Create a job to unpack an archive.
                    metadata = data_types.BundledArchiveMetadata()
                    metadata.blobstore_key = key
                    metadata.timeout = timeout
                    metadata.job_queue = job_queue
                    metadata.job_type = job_type
                    metadata.http_flag = http_flag
                    metadata.archive_filename = filename
                    metadata.uploader_email = email
                    metadata.gestures = gestures
                    metadata.crash_revision = crash_revision
                    metadata.additional_arguments = additional_arguments
                    metadata.bug_information = bug_information
                    metadata.platform_id = platform_id
                    metadata.app_launch_command = app_launch_command
                    metadata.fuzzer_name = fuzzer_name
                    metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
                    metadata.fuzzer_binary_name = target_name
                    metadata.put()

                    tasks.add_task(
                        "unpack",
                        str(metadata.key.id()),
                        job_type,
                        queue=tasks.queue_for_job(job_type),
                    )

                    # Create a testcase metadata object to show the user their upload.
                    upload_metadata = data_types.TestcaseUploadMetadata()
                    upload_metadata.timestamp = datetime.datetime.utcnow()
                    upload_metadata.filename = filename
                    upload_metadata.blobstore_key = key
                    upload_metadata.original_blobstore_key = key
                    upload_metadata.status = "Pending"
                    upload_metadata.bundled = True
                    upload_metadata.uploader_email = email
                    upload_metadata.retries = retries
                    upload_metadata.bug_summary_update_flag = bug_summary_update_flag
                    upload_metadata.quiet_flag = quiet_flag
                    upload_metadata.additional_metadata_string = json.dumps(
                        testcase_metadata)
                    upload_metadata.put()

                    helpers.log("Uploaded multiple testcases.",
                                helpers.VIEW_OPERATION)
                    self.render_json({"multiple": True})
                    return

                file_path_input = guess_input_file(uploaded_file, filename)
                if not file_path_input:
                    raise helpers.EarlyExitException(
                        ("Unable to detect which file to launch. The main file's name "
                         "must contain either of %s." %
                         str(RUN_FILE_PATTERNS)),
                        400,
                    )

        else:
            raise helpers.EarlyExitException("Please select a file to upload.",
                                             400)

        testcase_id = data_handler.create_user_uploaded_testcase(
            key,
            key,
            archive_state,
            filename,
            file_path_input,
            timeout,
            job_type,
            job_queue,
            http_flag,
            gestures,
            additional_arguments,
            bug_information,
            crash_revision,
            email,
            platform_id,
            app_launch_command,
            fuzzer_name,
            fully_qualified_fuzzer_name,
            target_name,
            bundled,
            retries,
            bug_summary_update_flag,
            quiet_flag,
            additional_metadata=testcase_metadata,
        )

        if not quiet_flag:
            testcase = data_handler.get_testcase_by_id(testcase_id)
            issue = issue_tracker_utils.get_issue_for_testcase(testcase)
            if issue:
                report_url = data_handler.TESTCASE_REPORT_URL.format(
                    domain=data_handler.get_domain(), testcase_id=testcase_id)

                comment = ("ClusterFuzz is analyzing your testcase. "
                           "Developers can follow the progress at %s." %
                           report_url)
                issue.save(new_comment=comment)

        helpers.log("Uploaded testcase %s" % testcase_id,
                    helpers.VIEW_OPERATION)
        self.render_json({"id": "%s" % testcase_id})
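
guess_input_file and RUN_FILE_PATTERNS are referenced in the archive handling above but are not defined in this excerpt. The real helper inspects the uploaded blob through ClusterFuzz's archive utilities; the stdlib sketch below only illustrates the name-matching idea, and the RUN_FILE_PATTERNS values are assumptions.

import os
import zipfile

# Assumed pattern list; the actual values live alongside the upload handler.
RUN_FILE_PATTERNS = ['run', 'fuzz-', 'index.', 'crash.']


def guess_input_file_from_zip(zip_path):
    """Sketch only: pick the archive member most likely to be the main file."""
    with zipfile.ZipFile(zip_path) as archive_file:
        for member in archive_file.namelist():
            basename = os.path.basename(member).lower()
            if any(pattern in basename for pattern in RUN_FILE_PATTERNS):
                return member
    return None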