Example #1
def to_dict(entity):
    """Convert a db.Model instance to a dict."""
    entity_dict = entity.to_dict()
    entity_dict['id'] = entity.key.id()

    for k, v in six.iteritems(entity_dict):
        if isinstance(v, datetime.datetime):
            entity_dict[k] = utils.utc_datetime_to_timestamp(v)

    return entity_dict
Example #2
    def to_pubsub_message(self):
        """Convert the task to a pubsub message."""
        attributes = {
            'command': self.command,
            'argument': str(self.argument),
            'job': self.job,
        }

        if self.eta:
            attributes['eta'] = str(utils.utc_datetime_to_timestamp(self.eta))

        return pubsub.Message(attributes=attributes)
Example #3
def get_result():
    """Get the result."""
    params = dict(request.iterparams())
    page = helpers.cast(request.get('page') or 1, int, "'page' is not an int.")

    query = datastore_query.Query(data_types.TestcaseUploadMetadata)
    query.order('timestamp', is_desc=True)

    if not access.has_access(need_privileged_access=True):
        query.filter('uploader_email', helpers.get_user_email())
        params['permission'] = {'uploaderEmail': helpers.get_user_email()}

    entities, total_pages, total_items, has_more = query.fetch_page(
        page=page, page_size=PAGE_SIZE, projection=None, more_limit=MORE_LIMIT)

    items = []
    for entity in entities:
        items.append({
            'timestamp': utils.utc_datetime_to_timestamp(entity.timestamp),
            'testcaseId': entity.testcase_id,
            'uploaderEmail': entity.uploader_email,
            'filename': entity.filename,
            'bundled': entity.bundled,
            'pathInArchive': entity.path_in_archive,
            'status': entity.status
        })

    attach_testcases(items)

    result = {
        'hasMore': has_more,
        'items': items,
        'page': page,
        'pageSize': PAGE_SIZE,
        'totalItems': total_items,
        'totalPages': total_pages,
    }
    return result, params
Example #4
    def test_get(self):
        """Test get."""
        mtime = datetime.datetime(2019, 1, 1)
        mtime_seconds = utils.utc_datetime_to_timestamp(mtime)

        self.fs.create_file('/local/test-bucket/objects/a',
                            contents='a').st_mtime = mtime_seconds
        self.fs.create_file('/local/test-bucket/metadata/a',
                            contents='{"key": "value"}')

        self.assertDictEqual(
            {
                'bucket': 'test-bucket',
                'name': 'a',
                'size': 1,
                'updated': mtime,
                'metadata': {
                    'key': 'value'
                },
            }, self.provider.get('gs://test-bucket/a'))
Example #5
def get_result():
    """Get the result for the testcase list page."""
    params = dict(request.iterparams())
    page = helpers.cast(request.get('page') or 1, int, "'page' is not an int.")

    query = datastore_query.Query(data_types.Testcase)
    crash_access.add_scope(query, params, 'security_flag', 'job_type',
                           'fuzzer_name_indices')
    add_filters(query, params)

    testcases, total_pages, total_items, has_more = query.fetch_page(
        page=page,
        page_size=PAGE_SIZE,
        projection=FIELDS,
        more_limit=MORE_LIMIT)

    items = []
    for testcase in testcases:
        regression_range = ''
        fixed_range = ''

        if testcase.regression and testcase.regression != 'NA':
            regression_range = testcase.regression
        if testcase.fixed and testcase.fixed != 'NA':
            fixed_range = testcase.fixed

        item = {
            'id': testcase.key.id(),
            'crashType': ' '.join(testcase.crash_type.splitlines()),
            'crashStateLines': testcase.crash_state.strip().splitlines(),
            'jobType': testcase.job_type,
            'isClosed': not testcase.open,
            'isFixed': testcase.fixed and testcase.fixed != 'NA',
            'isReproducible': not testcase.one_time_crasher_flag,
            'isSecurity': testcase.security_flag,
            'isImpactSet': testcase.is_impact_set_flag,
            'impacts': {
                'extendedStable': testcase.impact_extended_stable_version,
                'stable': testcase.impact_stable_version,
                'beta': testcase.impact_beta_version,
                'head': testcase.impact_head_version,
            },
            'regressionRange': regression_range,
            'fixedRange': fixed_range,
            'groupId': testcase.group_id,
            'projectName': testcase.project_name,
            'platform': testcase.platform,
            'issueId': (testcase.bug_information or
                        testcase.group_bug_information),
            'showImpacts': testcase.has_impacts(),
            'impactsProduction': testcase.impacts_production()
        }
        if testcase.timestamp:
            item['timestamp'] = utils.utc_datetime_to_timestamp(
                testcase.timestamp)

        items.append(item)

    helpers.log('Testcases', helpers.VIEW_OPERATION)

    result = {
        'hasMore': has_more,
        'items': items,
        'page': page,
        'pageSize': PAGE_SIZE,
        'totalItems': total_items,
        'totalPages': total_pages,
    }
    return result, params
Example #6
    def test_list_blobs(self):
        """Test list_blobs."""
        mtime = datetime.datetime(2019, 1, 1)
        mtime_seconds = utils.utc_datetime_to_timestamp(mtime)

        self.fs.create_file('/local/test-bucket/objects/a',
                            st_size=11).st_mtime = mtime_seconds
        self.fs.create_file('/local/test-bucket/objects/b/c',
                            st_size=22).st_mtime = mtime_seconds
        self.fs.create_file('/local/test-bucket/objects/b/d/e',
                            st_size=33).st_mtime = mtime_seconds
        self.fs.create_file('/local/test-bucket/objects/f',
                            st_size=44).st_mtime = mtime_seconds
        self.fs.create_file(
            '/local/test-bucket/metadata/b/c',
            contents='{"key":"value"}').st_mtime = mtime_seconds

        result = list(self.provider.list_blobs('gs://test-bucket'))
        six.assertCountEqual(self, [{
            'bucket': 'test-bucket',
            'name': 'a',
            'updated': mtime,
            'size': 11,
            'metadata': {}
        }, {
            'bucket': 'test-bucket',
            'name': 'f',
            'updated': mtime,
            'size': 44,
            'metadata': {}
        }, {
            'bucket': 'test-bucket',
            'name': 'b/c',
            'updated': mtime,
            'size': 22,
            'metadata': {
                'key': 'value'
            }
        }, {
            'bucket': 'test-bucket',
            'name': 'b/d/e',
            'updated': mtime,
            'size': 33,
            'metadata': {}
        }], result)

        result = list(self.provider.list_blobs('gs://test-bucket/b'))
        six.assertCountEqual(self, [{
            'bucket': 'test-bucket',
            'name': 'b/c',
            'updated': mtime,
            'size': 22,
            'metadata': {
                'key': 'value'
            }
        }, {
            'bucket': 'test-bucket',
            'name': 'b/d/e',
            'updated': mtime,
            'size': 33,
            'metadata': {}
        }], result)

        result = list(self.provider.list_blobs('gs://test-bucket/b/d'))
        six.assertCountEqual(self, [{
            'bucket': 'test-bucket',
            'name': 'b/d/e',
            'updated': mtime,
            'size': 33,
            'metadata': {}
        }], result)

        result = list(
            self.provider.list_blobs('gs://test-bucket/', recursive=False))
        six.assertCountEqual(self, [{
            'bucket': 'test-bucket',
            'name': 'a',
            'updated': mtime,
            'size': 11,
            'metadata': {}
        }, {
            'bucket': 'test-bucket',
            'name': 'f',
            'updated': mtime,
            'size': 44,
            'metadata': {}
        }, {
            'bucket': 'test-bucket',
            'name': 'b',
        }], result)
Example #7
def get_testcase_detail(testcase):
    """Get testcase detail for rendering the testcase detail page."""
    config = db_config.get()
    crash_address = testcase.crash_address
    crash_state = testcase.crash_state
    crash_state_lines = crash_state.strip().splitlines()
    crash_type = data_handler.get_crash_type_string(testcase)
    external_user = not access.has_access(job_type=testcase.job_type)
    issue_url = issue_tracker_utils.get_issue_url(testcase)
    metadata = testcase.get_metadata()
    original_testcase_size = _get_blob_size_string(testcase.fuzzed_keys)
    minimized_testcase_size = _get_blob_size_string(testcase.minimized_keys)
    has_issue_tracker = bool(data_handler.get_issue_tracker_name())

    fuzzer_display = data_handler.get_fuzzer_display(testcase)

    formatted_reproduction_help = _format_reproduction_help(
        data_handler.get_formatted_reproduction_help(testcase))
    # When we have a HELP_TEMPLATE, ignore any default values set for HELP_URL.
    if not formatted_reproduction_help:
        reproduction_help_url = data_handler.get_reproduction_help_url(
            testcase, config)
    else:
        reproduction_help_url = None

    if not testcase.regression:
        regression = 'Pending'
    elif testcase.regression == 'NA':
        regression = 'NA'
    else:
        regression = _get_revision_range_html_from_string(
            testcase.job_type, testcase.platform_id, testcase.regression)

    fixed_full = None
    if 'progression_pending' in metadata:
        fixed = 'Pending'
    elif not testcase.fixed:
        fixed = 'NO'
    elif testcase.fixed == 'NA':
        fixed = 'NA'
    elif testcase.fixed == 'Yes':
        fixed = 'YES'
    else:
        fixed = 'YES'
        fixed_full = _get_revision_range_html_from_string(
            testcase.job_type, testcase.platform_id, testcase.fixed)

    last_tested = None
    last_tested_revision = (metadata.get('last_tested_revision')
                            or testcase.crash_revision)
    if last_tested_revision:
        last_tested = _get_revision_range_html(testcase.job_type,
                                               testcase.platform_id,
                                               last_tested_revision)

    crash_revision = testcase.crash_revision
    crash_revisions_dict = revisions.get_component_revisions_dict(
        crash_revision, testcase.job_type, platform_id=testcase.platform_id)
    crash_stacktrace = data_handler.get_stacktrace(testcase)
    crash_stacktrace = filter_stacktrace(crash_stacktrace, testcase.crash_type,
                                         crash_revisions_dict,
                                         testcase.platform, testcase.job_type)
    crash_stacktrace = convert_to_lines(crash_stacktrace, crash_state_lines,
                                        crash_type)

    last_tested_crash_revision = metadata.get('last_tested_crash_revision')
    last_tested_crash_revisions_dict = revisions.get_component_revisions_dict(
        last_tested_crash_revision,
        testcase.job_type,
        platform_id=testcase.platform_id)
    last_tested_crash_stacktrace = data_handler.get_stacktrace(
        testcase, stack_attribute='last_tested_crash_stacktrace')
    last_tested_crash_stacktrace = filter_stacktrace(
        last_tested_crash_stacktrace, testcase.crash_type,
        last_tested_crash_revisions_dict, testcase.platform, testcase.job_type)
    last_tested_crash_stacktrace = convert_to_lines(
        last_tested_crash_stacktrace, crash_state_lines, crash_type)

    privileged_user = access.has_access(need_privileged_access=True)

    # Fix build url link. |storage.cloud.google.com| takes care of using the
    # right set of authentication credentials needed to access the link.
    if 'build_url' in metadata:
        metadata['build_url'] = metadata['build_url'].replace(
            'gs://', 'https://storage.cloud.google.com/')

    pending_blame_task = (testcase.has_blame() and 'blame_pending' in metadata
                          and metadata['blame_pending'])
    pending_impact_task = (testcase.has_impacts()
                           and not testcase.is_impact_set_flag)
    pending_minimize_task = not testcase.minimized_keys
    pending_progression_task = ('progression_pending' in metadata
                                and metadata['progression_pending'])
    pending_regression_task = not testcase.regression
    pending_stack_task = testcase.last_tested_crash_stacktrace == 'Pending'
    needs_refresh = (testcase.status == 'Pending' or (
        (testcase.status == 'Processed' or testcase.status == 'Duplicate') and
        (pending_blame_task or pending_impact_task or pending_minimize_task
         or pending_progression_task or pending_regression_task
         or pending_stack_task)))

    if data_types.SecuritySeverity.is_valid(testcase.security_severity):
        security_severity = severity_analyzer.severity_to_string(
            testcase.security_severity)
    else:
        security_severity = None

    auto_delete_timestamp = None
    auto_close_timestamp = None

    if testcase.one_time_crasher_flag:
        last_crash_time = (crash_stats.get_last_crash_time(testcase)
                           or testcase.timestamp)

        # Set auto-delete timestamp for unreproducible testcases with
        # no associated bug.
        if not testcase.bug_information:
            auto_delete_timestamp = utils.utc_datetime_to_timestamp(
                last_crash_time + datetime.timedelta(
                    days=data_types.UNREPRODUCIBLE_TESTCASE_NO_BUG_DEADLINE))

        # Set auto-close timestamp for unreproducible testcases with
        # an associated bug.
        if testcase.open and testcase.bug_information:
            auto_close_timestamp = utils.utc_datetime_to_timestamp(
                last_crash_time + datetime.timedelta(
                    days=data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE))

    memory_tool_display_string = environment.get_memory_tool_display_string(
        testcase.job_type)
    memory_tool_display_label = memory_tool_display_string.split(':')[0]
    memory_tool_display_value = memory_tool_display_string.split(
        ':')[1].strip()

    helpers.log('Testcase %s' % testcase.key.id(), helpers.VIEW_OPERATION)
    return {
        'id': testcase.key.id(),
        'crash_type': crash_type,
        'crash_address': crash_address,
        'crash_state': crash_state,  # Used by reproduce tool.
        'crash_state_lines': crash_state_lines,
        'crash_revision': testcase.crash_revision,
        'csrf_token': form.generate_csrf_token(),
        'external_user': external_user,
        'footer': testcase.comments,
        'formatted_reproduction_help': formatted_reproduction_help,
        'fixed': fixed,
        'fixed_full': fixed_full,
        'issue_url': issue_url,
        'is_admin': auth.is_current_user_admin(),
        'metadata': metadata,
        'minimized_testcase_size': minimized_testcase_size,
        'needs_refresh': needs_refresh,
        'original_testcase_size': original_testcase_size,
        'privileged_user': privileged_user,
        'regression': regression,
        'crash_stacktrace': {
            'lines': crash_stacktrace,
            'revision': revisions.get_real_revision(
                crash_revision,
                testcase.job_type,
                display=True,
                platform_id=testcase.platform_id)
        },
        'last_tested_crash_stacktrace': {
            'lines': last_tested_crash_stacktrace,
            'revision': revisions.get_real_revision(
                last_tested_crash_revision,
                testcase.job_type,
                display=True,
                platform_id=testcase.platform_id)
        },
        'security_severity': security_severity,
        'security_severities': data_types.SecuritySeverity.list(),
        'stats': {
            'min_hour': crash_stats.get_min_hour(),
            'max_hour': crash_stats.get_max_hour(),
        },
        'suspected_cls': _parse_suspected_cls(metadata.get('predator_result')),
        'testcase': testcase,
        'timestamp': utils.utc_datetime_to_timestamp(testcase.timestamp),
        'show_blame': testcase.has_blame(),
        'show_impact': testcase.has_impacts(),
        'impacts_production': testcase.impacts_production(),
        'find_similar_issues_options': FIND_SIMILAR_ISSUES_OPTIONS,
        'auto_delete_timestamp': auto_delete_timestamp,
        'auto_close_timestamp': auto_close_timestamp,
        'memory_tool_display_label': memory_tool_display_label,
        'memory_tool_display_value': memory_tool_display_value,
        'last_tested': last_tested,
        'is_admin_or_not_oss_fuzz': is_admin_or_not_oss_fuzz(),
        'has_issue_tracker': has_issue_tracker,
        'reproduction_help_url': reproduction_help_url,
        'is_local_development':
            environment.is_running_on_app_engine_development(),
        'fuzzer_display': fuzzer_display._asdict(),
    }
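
Every example above calls utils.utc_datetime_to_timestamp from the surrounding project; its implementation is not shown on this page. A minimal sketch of such a helper, assuming it only converts a timezone-naive UTC datetime into a Unix epoch value in seconds (the name and behavior are inferred from the call sites above, not taken from the project's source), could look like this:

import calendar
import datetime


def utc_datetime_to_timestamp(dt):
    """Sketch: convert a naive UTC datetime to seconds since the Unix epoch."""
    # calendar.timegm treats the time tuple as UTC (unlike time.mktime,
    # which assumes the local timezone), so a naive UTC datetime maps
    # directly to an epoch value.
    return calendar.timegm(dt.utctimetuple()) + dt.microsecond / 1e6


# Usage mirroring the examples above, e.g. Example #4:
mtime = datetime.datetime(2019, 1, 1)
mtime_seconds = utc_datetime_to_timestamp(mtime)  # 1546300800.0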