def get_result():
    """Get the result for the regression page.

    Builds a BigQuery-backed, access-scoped query for regression rows. If no
    'revision' filter is supplied, skips querying entirely and returns an
    empty page.

    Returns:
        A (result, params) tuple where result is a JSON-serializable dict of
        pagination metadata plus the fetched rows, and params is the
        (possibly augmented) request parameter dict.
    """
    # NOTE(review): docstring previously said "crash stats page" — this
    # handler defaults type to 'regression' and logs 'Regression', so it was
    # a copy-paste error from the crash-stats handler.
    params = dict(request.iterparams())
    params['type'] = params.get('type', 'regression')
    page = helpers.cast(
        request.get('page') or 1, int, "'page' is not an int.")

    # Without a revision there is nothing meaningful to query; short-circuit
    # to an empty result instead of issuing a BigQuery request.
    is_revision_empty = 'revision' not in params

    query = big_query_query.Query()
    crash_access.add_scope(query, params, 'security_flag', 'job_type',
                           'fuzzer_name')

    if is_revision_empty:
        total_count = 0
        rows = []
    else:
        filters.add(query, params, FILTERS)
        rows, total_count = get(
            params=params,
            query=query,
            offset=(page - 1) * PAGE_SIZE,
            limit=PAGE_SIZE)

    helpers.log('Regression', helpers.VIEW_OPERATION)

    result = {
        'totalPages': (total_count // PAGE_SIZE) + 1,
        'page': page,
        'pageSize': PAGE_SIZE,
        'items': rows,
        'totalCount': total_count,
        'isRevisionEmpty': is_revision_empty
    }
    return result, params
def get_result():
    """Get the result.

    Lists testcase upload metadata, newest first. Non-privileged users are
    restricted to their own uploads.

    Returns:
        A (result, params) tuple: pagination metadata plus serialized upload
        records, and the request parameter dict.
    """
    params = dict(request.iterparams())
    page = helpers.cast(
        request.get('page') or 1, int, "'page' is not an int.")

    query = datastore_query.Query(data_types.TestcaseUploadMetadata)
    query.order('timestamp', is_desc=True)

    # Non-privileged users only see uploads they made themselves.
    if not access.has_access(need_privileged_access=True):
        query.filter('uploader_email', helpers.get_user_email())
        params['permission'] = {'uploaderEmail': helpers.get_user_email()}

    uploads, total_pages, total_items, has_more = query.fetch_page(
        page=page, page_size=PAGE_SIZE, projection=None,
        more_limit=MORE_LIMIT)

    items = [{
        'timestamp': utils.utc_datetime_to_timestamp(upload.timestamp),
        'testcaseId': upload.testcase_id,
        'uploaderEmail': upload.uploader_email,
        'filename': upload.filename,
        'bundled': upload.bundled,
        'pathInArchive': upload.path_in_archive,
        'status': upload.status
    } for upload in uploads]

    attach_testcases(items)

    result = {
        'hasMore': has_more,
        'items': items,
        'page': page,
        'pageSize': PAGE_SIZE,
        'totalItems': total_items,
        'totalPages': total_pages,
    }
    return result, params
def get_result():
    """Get the result for the crash stats page.

    Runs a grouped, access-scoped crash-stats query with record-level
    filters plus post-grouping filters, then attaches testcases to the
    resulting rows.

    Returns:
        A (result, params) tuple: pagination metadata plus grouped rows, and
        the (defaulted) request parameter dict.

    Raises:
        helpers.EarlyExitException: if the supplied filters are invalid.
    """
    params = dict(request.iterparams())
    page = helpers.cast(
        request.get('page') or 1, int, "'page' is not an int.")

    # Fill in defaults so the echoed params always carry the effective
    # grouping/sorting configuration.
    params['group'] = params.get('group', 'platform')
    params['sort'] = params.get('sort', 'total_count')
    params['number'] = params.get('number', 'count')

    # Conditions for individual records.
    record_query = crash_stats.Query()
    record_query.group_by = params['group']
    record_query.sort_by = params['sort']
    crash_access.add_scope(record_query, params, 'security_flag', 'job_type',
                           'fuzzer_name')
    filters.add(record_query, params, FILTERS)

    # Conditions after grouping.
    group_query = crash_stats.Query()
    filters.add(group_query, params, GROUP_FILTERS)

    try:
        total_count, rows = crash_stats.get(
            query=record_query,
            group_query=group_query,
            offset=(page - 1) * PAGE_SIZE,
            limit=PAGE_SIZE)
    except ValueError:
        # Bad filter values surface as ValueError; map to an HTTP 400.
        raise helpers.EarlyExitException('Invalid filters', 400)

    attach_testcases(rows)
    helpers.log('CrashStats', helpers.VIEW_OPERATION)

    result = {
        'totalPages': (total_count // PAGE_SIZE) + 1,
        'page': page,
        'pageSize': PAGE_SIZE,
        'items': rows,
        'totalCount': total_count
    }
    return result, params
def get_results():
    """Get results for the jobs page.

    Lists jobs sorted alphabetically by name, with request-driven filters
    applied.

    Returns:
        A (result, params) tuple: pagination metadata plus serialized job
        dicts, and the request parameter dict.
    """
    # Return jobs sorted alphabetically by name.
    query = datastore_query.Query(data_types.Job)
    query.order('name', is_desc=False)
    params = dict(request.iterparams())
    filters.add(query, params, FILTERS)

    # Use `or 1` (not a `get` default) so an empty 'page' value falls back
    # to page 1, consistent with the other list handlers in this file.
    page = helpers.cast(
        request.get('page') or 1, int, "'page' is not an int.")
    items, total_pages, total_items, has_more = query.fetch_page(
        page=page, page_size=PAGE_SIZE, projection=None,
        more_limit=MORE_LIMIT)
    helpers.log('Jobs', helpers.VIEW_OPERATION)

    result = {
        'hasMore': has_more,
        'items': [_job_to_dict(item) for item in items],
        'page': page,
        'pageSize': PAGE_SIZE,
        'totalItems': total_items,
        'totalPages': total_pages,
    }
    return result, params
def get_result():
    """Get the result for the testcase list page.

    Runs an access-scoped, filtered datastore query over testcases and
    serializes each row for the list view.

    Returns:
        A (result, params) tuple: pagination metadata plus serialized
        testcase dicts, and the request parameter dict.
    """
    params = dict(request.iterparams())
    page = helpers.cast(
        request.get('page') or 1, int, "'page' is not an int.")

    query = datastore_query.Query(data_types.Testcase)
    crash_access.add_scope(query, params, 'security_flag', 'job_type',
                           'fuzzer_name_indices')
    add_filters(query, params)

    testcases, total_pages, total_items, has_more = query.fetch_page(
        page=page, page_size=PAGE_SIZE, projection=FIELDS,
        more_limit=MORE_LIMIT)

    items = []
    for testcase in testcases:
        # 'NA' is treated the same as an unset range.
        has_regression = testcase.regression and testcase.regression != 'NA'
        is_fixed = testcase.fixed and testcase.fixed != 'NA'

        item = {
            'id': testcase.key.id(),
            'crashType': ' '.join(testcase.crash_type.splitlines()),
            'crashStateLines': testcase.crash_state.strip().splitlines(),
            'jobType': testcase.job_type,
            'isClosed': not testcase.open,
            'isFixed': is_fixed,
            'isReproducible': not testcase.one_time_crasher_flag,
            'isSecurity': testcase.security_flag,
            'isImpactSet': testcase.is_impact_set_flag,
            'impacts': {
                'extendedStable': testcase.impact_extended_stable_version,
                'stable': testcase.impact_stable_version,
                'beta': testcase.impact_beta_version,
                'head': testcase.impact_head_version,
            },
            'regressionRange': testcase.regression if has_regression else '',
            'fixedRange': testcase.fixed if is_fixed else '',
            'groupId': testcase.group_id,
            'projectName': testcase.project_name,
            'platform': testcase.platform,
            'issueId': (testcase.bug_information or
                        testcase.group_bug_information),
            'showImpacts': testcase.has_impacts(),
            'impactsProduction': testcase.impacts_production()
        }
        # Timestamp may be unset on old entities; only serialize when present.
        if testcase.timestamp:
            item['timestamp'] = utils.utc_datetime_to_timestamp(
                testcase.timestamp)

        items.append(item)

    helpers.log('Testcases', helpers.VIEW_OPERATION)

    result = {
        'hasMore': has_more,
        'items': items,
        'page': page,
        'pageSize': PAGE_SIZE,
        'totalItems': total_items,
        'totalPages': total_pages,
    }
    return result, params