Example #1
def _make_bisection_request(pubsub_topic, testcase, target, bisect_type):
    """Make a bisection request to the external bisection service. Returns whether
  or not a request was actually made."""
    if bisect_type == 'fixed':
        old_commit, new_commit = _get_commits(testcase.fixed,
                                              testcase.job_type)
    elif bisect_type == 'regressed':
        old_commit, new_commit = _get_commits(testcase.regression,
                                              testcase.job_type)
    else:
        raise ValueError('Invalid bisection type: ' + bisect_type)

    if not new_commit:
        # old_commit can be empty (i.e. '0' case), but new_commit should never be.
        return False

    old_commit, new_commit = _check_commits(testcase, bisect_type, old_commit,
                                            new_commit)

    repo_url = data_handler.get_main_repo(testcase.job_type) or ''
    reproducer = blobs.read_key(testcase.minimized_keys
                                or testcase.fuzzed_keys)
    pubsub_client = pubsub.PubSubClient()
    pubsub_client.publish(pubsub_topic, [
        pubsub.Message(
            reproducer, {
                'type': bisect_type,
                'project_name': target.project,
                'sanitizer': environment.SANITIZER_NAME_MAP[
                    environment.get_memory_tool_name(testcase.job_type)],
                'fuzz_target': target.binary,
                'old_commit': old_commit,
                'new_commit': new_commit,
                'testcase_id': str(testcase.key.id()),
                'issue_id': testcase.bug_information,
                'crash_type': testcase.crash_type,
                'crash_state': testcase.crash_state,
                'security': str(testcase.security_flag),
                'severity': severity_analyzer.severity_to_string(
                    testcase.security_severity),
                'timestamp': testcase.timestamp.isoformat(),
                'repo_url': repo_url,
            })
    ])
    return True
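
For orientation, here is a minimal sketch of how a bisection service might consume these messages with the standard google.cloud.pubsub_v1 subscriber client. The project ID, subscription name, and the handle_bisection_request helper are hypothetical placeholders, not part of the code above; the real service may use a different client or wrapper.

from concurrent.futures import TimeoutError

from google.cloud import pubsub_v1


def handle_bisection_request(message):
    """Hypothetical handler for a single bisection request message."""
    attributes = dict(message.attributes)
    reproducer_bytes = message.data  # Raw reproducer published above.
    print('Bisecting testcase %s from %s to %s' %
          (attributes['testcase_id'], attributes['old_commit'],
           attributes['new_commit']))
    message.ack()


subscriber = pubsub_v1.SubscriberClient()
# 'my-project' and 'bisection-requests' are placeholder names.
subscription_path = subscriber.subscription_path('my-project',
                                                 'bisection-requests')
future = subscriber.subscribe(
    subscription_path, callback=handle_bisection_request)
try:
    future.result(timeout=30)
except TimeoutError:
    future.cancel()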
Example #2
def severity_substitution(label, testcase, security_severity):
    """Severity substitution."""
    # Fall back to the severity stored on the testcase if none was provided.
    if security_severity is None:
        security_severity = testcase.security_severity

    # Set to default high severity if we can't determine it automatically.
    if not data_types.SecuritySeverity.is_valid(security_severity):
        security_severity = data_types.SecuritySeverity.HIGH

    security_severity_string = severity_analyzer.severity_to_string(
        security_severity)
    return [label.replace('%SEVERITY%', security_severity_string)]
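
A small self-contained illustration of the %SEVERITY% placeholder substitution follows; the severity names, numeric values, and label format here are assumptions made for the sketch, not the actual data_types or severity_analyzer behavior.

# Assumed mapping for the sketch; ClusterFuzz derives the string via
# severity_analyzer.severity_to_string and data_types.SecuritySeverity.
SEVERITY_NAMES = {0: 'Critical', 1: 'High', 2: 'Medium', 3: 'Low'}
DEFAULT_SEVERITY = 1  # High, mirroring the default used above.


def substitute_severity(label, severity):
    """Replace the %SEVERITY% placeholder in an issue label."""
    if severity not in SEVERITY_NAMES:
        severity = DEFAULT_SEVERITY
    return [label.replace('%SEVERITY%', SEVERITY_NAMES[severity])]


print(substitute_severity('Security_Severity-%SEVERITY%', 2))
# ['Security_Severity-Medium']
print(substitute_severity('Security_Severity-%SEVERITY%', None))
# ['Security_Severity-High']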
Example #3
def get_testcase_detail(testcase):
    """Get testcase detail for rendering the testcase detail page."""
    config = db_config.get()
    crash_address = testcase.crash_address
    crash_state = testcase.crash_state
    crash_state_lines = crash_state.strip().splitlines()
    crash_type = data_handler.get_crash_type_string(testcase)
    external_user = not access.has_access(job_type=testcase.job_type)
    issue_url = issue_tracker_utils.get_issue_url(testcase)
    metadata = testcase.get_metadata()
    original_testcase_size = _get_blob_size_string(testcase.fuzzed_keys)
    minimized_testcase_size = _get_blob_size_string(testcase.minimized_keys)
    has_issue_tracker = bool(data_handler.get_issue_tracker_name())

    fuzzer_display = data_handler.get_fuzzer_display(testcase)

    formatted_reproduction_help = _format_reproduction_help(
        data_handler.get_formatted_reproduction_help(testcase))
    # When we have a HELP_TEMPLATE, ignore any default values set for HELP_URL.
    if not formatted_reproduction_help:
        reproduction_help_url = data_handler.get_reproduction_help_url(
            testcase, config)
    else:
        reproduction_help_url = None

    if not testcase.regression:
        regression = 'Pending'
    elif testcase.regression == 'NA':
        regression = 'NA'
    else:
        regression = _get_revision_range_html_from_string(
            testcase.job_type, testcase.platform_id, testcase.regression)

    fixed_full = None
    if 'progression_pending' in metadata:
        fixed = 'Pending'
    elif not testcase.fixed:
        fixed = 'NO'
    elif testcase.fixed == 'NA':
        fixed = 'NA'
    elif testcase.fixed == 'Yes':
        # Fixed, but no fixed revision range is available.
        fixed = 'YES'
    else:
        # Fixed, with testcase.fixed holding the revision range string.
        fixed = 'YES'
        fixed_full = _get_revision_range_html_from_string(
            testcase.job_type, testcase.platform_id, testcase.fixed)

    last_tested = None
    last_tested_revision = (metadata.get('last_tested_revision')
                            or testcase.crash_revision)
    if last_tested_revision:
        last_tested = _get_revision_range_html(testcase.job_type,
                                               testcase.platform_id,
                                               last_tested_revision)

    crash_revision = testcase.crash_revision
    crash_revisions_dict = revisions.get_component_revisions_dict(
        crash_revision, testcase.job_type, platform_id=testcase.platform_id)
    crash_stacktrace = data_handler.get_stacktrace(testcase)
    crash_stacktrace = filter_stacktrace(crash_stacktrace, testcase.crash_type,
                                         crash_revisions_dict,
                                         testcase.platform, testcase.job_type)
    crash_stacktrace = convert_to_lines(crash_stacktrace, crash_state_lines,
                                        crash_type)

    last_tested_crash_revision = metadata.get('last_tested_crash_revision')
    last_tested_crash_revisions_dict = revisions.get_component_revisions_dict(
        last_tested_crash_revision,
        testcase.job_type,
        platform_id=testcase.platform_id)
    last_tested_crash_stacktrace = data_handler.get_stacktrace(
        testcase, stack_attribute='last_tested_crash_stacktrace')
    last_tested_crash_stacktrace = filter_stacktrace(
        last_tested_crash_stacktrace, testcase.crash_type,
        last_tested_crash_revisions_dict, testcase.platform, testcase.job_type)
    last_tested_crash_stacktrace = convert_to_lines(
        last_tested_crash_stacktrace, crash_state_lines, crash_type)

    privileged_user = access.has_access(need_privileged_access=True)

    # Fix build url link. |storage.cloud.google.com| takes care of using the
    # right set of authentication credentials needed to access the link.
    if 'build_url' in metadata:
        metadata['build_url'] = metadata['build_url'].replace(
            'gs://', 'https://storage.cloud.google.com/')

    pending_blame_task = (testcase.has_blame() and 'blame_pending' in metadata
                          and metadata['blame_pending'])
    pending_impact_task = (testcase.has_impacts()
                           and not testcase.is_impact_set_flag)
    pending_minimize_task = not testcase.minimized_keys
    pending_progression_task = ('progression_pending' in metadata
                                and metadata['progression_pending'])
    pending_regression_task = not testcase.regression
    pending_stack_task = testcase.last_tested_crash_stacktrace == 'Pending'
    needs_refresh = (testcase.status == 'Pending' or (
        (testcase.status == 'Processed' or testcase.status == 'Duplicate') and
        (pending_blame_task or pending_impact_task or pending_minimize_task
         or pending_progression_task or pending_regression_task
         or pending_stack_task)))

    if data_types.SecuritySeverity.is_valid(testcase.security_severity):
        security_severity = severity_analyzer.severity_to_string(
            testcase.security_severity)
    else:
        security_severity = None

    auto_delete_timestamp = None
    auto_close_timestamp = None

    if testcase.one_time_crasher_flag:
        last_crash_time = (crash_stats.get_last_crash_time(testcase)
                           or testcase.timestamp)

        # Set auto-delete timestamp for unreproducible testcases with
        # no associated bug.
        if not testcase.bug_information:
            auto_delete_timestamp = utils.utc_datetime_to_timestamp(
                last_crash_time + datetime.timedelta(
                    days=data_types.UNREPRODUCIBLE_TESTCASE_NO_BUG_DEADLINE))

        # Set auto-close timestamp for unreproducible testcases with
        # an associated bug.
        if testcase.open and testcase.bug_information:
            auto_close_timestamp = utils.utc_datetime_to_timestamp(
                last_crash_time + datetime.timedelta(
                    days=data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE))

    memory_tool_display_string = environment.get_memory_tool_display_string(
        testcase.job_type)
    memory_tool_display_label = memory_tool_display_string.split(':')[0]
    memory_tool_display_value = memory_tool_display_string.split(
        ':')[1].strip()

    helpers.log('Testcase %s' % testcase.key.id(), helpers.VIEW_OPERATION)
    return {
        'id': testcase.key.id(),
        'crash_type': crash_type,
        'crash_address': crash_address,
        'crash_state': crash_state,  # Used by reproduce tool.
        'crash_state_lines': crash_state_lines,
        'crash_revision': testcase.crash_revision,
        'csrf_token': form.generate_csrf_token(),
        'external_user': external_user,
        'footer': testcase.comments,
        'formatted_reproduction_help': formatted_reproduction_help,
        'fixed': fixed,
        'fixed_full': fixed_full,
        'issue_url': issue_url,
        'is_admin': auth.is_current_user_admin(),
        'metadata': metadata,
        'minimized_testcase_size': minimized_testcase_size,
        'needs_refresh': needs_refresh,
        'original_testcase_size': original_testcase_size,
        'privileged_user': privileged_user,
        'regression': regression,
        'crash_stacktrace': {
            'lines': crash_stacktrace,
            'revision': revisions.get_real_revision(
                crash_revision,
                testcase.job_type,
                display=True,
                platform_id=testcase.platform_id)
        },
        'last_tested_crash_stacktrace': {
            'lines': last_tested_crash_stacktrace,
            'revision': revisions.get_real_revision(
                last_tested_crash_revision,
                testcase.job_type,
                display=True,
                platform_id=testcase.platform_id)
        },
        'security_severity': security_severity,
        'security_severities': data_types.SecuritySeverity.list(),
        'stats': {
            'min_hour': crash_stats.get_min_hour(),
            'max_hour': crash_stats.get_max_hour(),
        },
        'suspected_cls': _parse_suspected_cls(metadata.get('predator_result')),
        'testcase': testcase,
        'timestamp': utils.utc_datetime_to_timestamp(testcase.timestamp),
        'show_blame': testcase.has_blame(),
        'show_impact': testcase.has_impacts(),
        'impacts_production': testcase.impacts_production(),
        'find_similar_issues_options': FIND_SIMILAR_ISSUES_OPTIONS,
        'auto_delete_timestamp': auto_delete_timestamp,
        'auto_close_timestamp': auto_close_timestamp,
        'memory_tool_display_label': memory_tool_display_label,
        'memory_tool_display_value': memory_tool_display_value,
        'last_tested': last_tested,
        'is_admin_or_not_oss_fuzz': is_admin_or_not_oss_fuzz(),
        'has_issue_tracker': has_issue_tracker,
        'reproduction_help_url': reproduction_help_url,
        'is_local_development':
            environment.is_running_on_app_engine_development(),
        'fuzzer_display': fuzzer_display._asdict(),
    }
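
As a concrete illustration of the auto-delete / auto-close arithmetic above, a self-contained sketch follows; the 7- and 14-day deadlines and the timestamp helper are placeholder stand-ins for the data_types constants and utils.utc_datetime_to_timestamp.

import calendar
import datetime

# Placeholder deadlines for the sketch; the real values come from
# data_types.UNREPRODUCIBLE_TESTCASE_NO_BUG_DEADLINE and
# data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE.
NO_BUG_DEADLINE_DAYS = 7
WITH_BUG_DEADLINE_DAYS = 14


def utc_datetime_to_timestamp(dt):
    """Rough stand-in for utils.utc_datetime_to_timestamp (naive UTC input)."""
    return calendar.timegm(dt.timetuple()) + dt.microsecond / 10**6


last_crash_time = datetime.datetime(2024, 1, 1, 12, 0, 0)

# Unreproducible testcase with no associated bug: schedule auto-delete.
auto_delete_timestamp = utc_datetime_to_timestamp(
    last_crash_time + datetime.timedelta(days=NO_BUG_DEADLINE_DAYS))

# Unreproducible testcase with an open, associated bug: schedule auto-close.
auto_close_timestamp = utc_datetime_to_timestamp(
    last_crash_time + datetime.timedelta(days=WITH_BUG_DEADLINE_DAYS))

print(auto_delete_timestamp, auto_close_timestamp)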