def get(self, testcase_id, issue_id):
  """Redirect user to the correct URL."""
  testcase = helpers.get_testcase(testcase_id)

  # Resolve the issue tracker base URL for this testcase; bail out with the
  # given messages if the tracker cannot be determined.
  def resolve_issue_url():
    return issue_tracker_utils.get_issue_url(testcase)

  tracker_url = helpers.get_or_exit(
      resolve_issue_url,
      'Issue tracker for testcase (id=%s) is not found.' % testcase_id,
      'Failed to get the issue tracker URL.')

  # The tracker URL acts as a prefix; appending the issue id yields the
  # final destination.
  self.redirect(tracker_url + issue_id)
def get_testcase_detail(testcase):
  """Get testcase detail for rendering the testcase detail page.

  Args:
    testcase: A testcase entity (project data type). The function reads its
      crash fields, job type, metadata, and blob keys.

  Returns:
    A dict of template values consumed by the testcase detail page (crash
    info, formatted stacktraces, pending-task flags, severity, timestamps,
    and display strings).
  """
  config = db_config.get()
  crash_address = testcase.crash_address
  crash_state = testcase.crash_state
  crash_state_lines = crash_state.strip().splitlines()
  crash_type = data_handler.get_crash_type_string(testcase)
  reproduction_help_url = data_handler.get_reproduction_help_url(
      testcase, config)
  # External users are those without access to this testcase's job type.
  external_user = not access.has_access(job_type=testcase.job_type)
  issue_url = issue_tracker_utils.get_issue_url(testcase)
  metadata = testcase.get_metadata()
  original_testcase_size = _get_blob_size_string(testcase.fuzzed_keys)
  minimized_testcase_size = _get_blob_size_string(testcase.minimized_keys)
  stack_clean_regex_lines = config.stack_clean_regex
  has_issue_tracker = bool(data_handler.get_issue_tracker_name())

  # Regression display: empty means the regression task has not finished,
  # 'NA' is a sentinel meaning not applicable; anything else is a revision
  # range string rendered as HTML.
  if not testcase.regression:
    regression = 'Pending'
  elif testcase.regression == 'NA':
    regression = 'NA'
  else:
    regression = _get_revision_range_html_from_string(testcase.job_type,
                                                      testcase.regression)

  # Fixed display: 'Yes' is a sentinel with no revision range attached;
  # any other non-sentinel value is a revision range shown via |fixed_full|.
  fixed_full = None
  if 'progression_pending' in metadata:
    fixed = 'Pending'
  elif not testcase.fixed:
    fixed = 'NO'
  elif testcase.fixed == 'NA':
    fixed = 'NA'
  elif testcase.fixed == 'Yes':
    fixed = 'YES'
  else:
    fixed = 'YES'
    fixed_full = _get_revision_range_html_from_string(testcase.job_type,
                                                      testcase.fixed)

  # Fall back to the crash revision when no explicit last-tested revision
  # was recorded in metadata.
  last_tested = None
  last_tested_revision = (
      metadata.get('last_tested_revision') or testcase.crash_revision)
  if last_tested_revision:
    last_tested = _get_revision_range_html(testcase.job_type,
                                           last_tested_revision)

  # Primary crash stacktrace: fetch, clean with the configured regexes, and
  # convert to per-line structures with crash-state highlighting.
  crash_revision = testcase.crash_revision
  crash_revisions_dict = revisions.get_component_revisions_dict(
      crash_revision, testcase.job_type)
  crash_stacktrace = data_handler.get_stacktrace(testcase)
  crash_stacktrace = filter_stacktrace(crash_stacktrace, testcase.crash_type,
                                       stack_clean_regex_lines,
                                       crash_revisions_dict)
  crash_stacktrace = convert_to_lines(crash_stacktrace, crash_state_lines,
                                      crash_type)
  crash_stacktrace_preview_lines = _preview_stacktrace(crash_stacktrace)

  # Second crash stacktrace (same pipeline, different stored attribute).
  second_crash_stacktrace_revision = metadata.get(
      'second_crash_stacktrace_revision')
  second_crash_stacktrace_revisions_dict = (
      revisions.get_component_revisions_dict(second_crash_stacktrace_revision,
                                             testcase.job_type))
  second_crash_stacktrace = data_handler.get_stacktrace(
      testcase, stack_attribute='second_crash_stacktrace')
  second_crash_stacktrace = filter_stacktrace(
      second_crash_stacktrace, testcase.crash_type, stack_clean_regex_lines,
      second_crash_stacktrace_revisions_dict)
  second_crash_stacktrace = convert_to_lines(second_crash_stacktrace,
                                             crash_state_lines, crash_type)
  second_crash_stacktrace_preview_lines = _preview_stacktrace(
      second_crash_stacktrace)

  # Last-tested crash stacktrace (same pipeline again).
  last_tested_crash_revision = metadata.get('last_tested_crash_revision')
  last_tested_crash_revisions_dict = revisions.get_component_revisions_dict(
      last_tested_crash_revision, testcase.job_type)
  last_tested_crash_stacktrace = data_handler.get_stacktrace(
      testcase, stack_attribute='last_tested_crash_stacktrace')
  last_tested_crash_stacktrace = filter_stacktrace(
      last_tested_crash_stacktrace, testcase.crash_type,
      stack_clean_regex_lines, last_tested_crash_revisions_dict)
  last_tested_crash_stacktrace = convert_to_lines(last_tested_crash_stacktrace,
                                                  crash_state_lines,
                                                  crash_type)
  last_tested_crash_stacktrace_preview_lines = _preview_stacktrace(
      last_tested_crash_stacktrace)

  privileged_user = access.has_access(need_privileged_access=True)

  # Fix build url link. |storage.cloud.google.com| takes care of using the
  # right set of authentication credentials needed to access the link.
  if 'build_url' in metadata:
    metadata['build_url'] = metadata['build_url'].replace(
        'gs://', 'https://storage.cloud.google.com/')

  # Pending-task flags drive the page's auto-refresh behavior below.
  pending_blame_task = (
      testcase.has_blame() and 'blame_pending' in metadata and
      metadata['blame_pending'])
  pending_impact_task = (
      testcase.has_impacts() and not testcase.is_impact_set_flag)
  pending_minimize_task = not testcase.minimized_keys
  pending_progression_task = ('progression_pending' in metadata and
                              metadata['progression_pending'])
  pending_regression_task = not testcase.regression
  pending_stack_task = testcase.last_tested_crash_stacktrace == 'Pending'
  # Refresh while the testcase is still being triaged, or while any task is
  # outstanding on a processed/duplicate testcase.
  needs_refresh = (
      testcase.status == 'Pending' or
      ((testcase.status == 'Processed' or testcase.status == 'Duplicate') and
       (pending_blame_task or pending_impact_task or pending_minimize_task or
        pending_progression_task or pending_regression_task or
        pending_stack_task)))

  if data_types.SecuritySeverity.is_valid(testcase.security_severity):
    security_severity = label_utils.severity_to_string(
        testcase.security_severity)
  else:
    security_severity = None

  auto_delete_timestamp = None
  auto_close_timestamp = None
  if testcase.one_time_crasher_flag:
    last_crash_time = (
        crash_stats.get_last_crash_time(testcase) or testcase.timestamp)
    # Set auto-delete timestamp for unreproducible testcases with
    # no associated bug.
    if not testcase.bug_information:
      auto_delete_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_NO_BUG_DEADLINE))
    # Set auto-close timestamp for unreproducible testcases with
    # an associated bug.
    if testcase.open and testcase.bug_information:
      auto_close_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE))

  # NOTE(review): assumes the display string is 'Label: value' with exactly
  # one colon-separated label prefix — confirm against
  # environment.get_memory_tool_display_string; a colon inside the value
  # portion would be truncated by split(':')[1].
  memory_tool_display_string = environment.get_memory_tool_display_string(
      testcase.job_type)
  memory_tool_display_label = memory_tool_display_string.split(':')[0]
  memory_tool_display_value = memory_tool_display_string.split(':')[1].strip()

  helpers.log('Testcase %s' % testcase.key.id(), helpers.VIEW_OPERATION)
  return {
      'id': testcase.key.id(),
      'crash_type': crash_type,
      'crash_address': crash_address,
      'crash_state': crash_state,  # Used by reproduce tool.
      'crash_state_lines': crash_state_lines,
      'crash_revision': testcase.crash_revision,
      'csrf_token': form.generate_csrf_token(),
      'external_user': external_user,
      'footer': testcase.comments,
      'fixed': fixed,
      'fixed_full': fixed_full,
      'issue_url': issue_url,
      'is_admin': users.is_current_user_admin(),
      'metadata': metadata,
      'minimized_testcase_size': minimized_testcase_size,
      'needs_refresh': needs_refresh,
      'original_testcase_size': original_testcase_size,
      'privileged_user': privileged_user,
      'regression': regression,
      'crash_stacktrace': {
          'lines': crash_stacktrace,
          'preview_lines': crash_stacktrace_preview_lines,
          'revision': revisions.get_real_revision(
              crash_revision, testcase.job_type, display=True)
      },
      'second_crash_stacktrace': {
          'lines': second_crash_stacktrace,
          'preview_lines': second_crash_stacktrace_preview_lines,
          'revision': revisions.get_real_revision(
              second_crash_stacktrace_revision, testcase.job_type,
              display=True)
      },
      'last_tested_crash_stacktrace': {
          'lines': last_tested_crash_stacktrace,
          'preview_lines': last_tested_crash_stacktrace_preview_lines,
          'revision': revisions.get_real_revision(
              last_tested_crash_revision, testcase.job_type, display=True)
      },
      'security_severity': security_severity,
      'security_severities': data_types.SecuritySeverity.list(),
      'stats': {
          'min_hour': crash_stats.get_min_hour(),
          'max_hour': crash_stats.get_max_hour(),
      },
      'suspected_cls': _parse_suspected_cls(metadata.get('predator_result')),
      'testcase': testcase,
      'timestamp': utils.utc_datetime_to_timestamp(testcase.timestamp),
      'show_blame': testcase.has_blame(),
      'show_impact': testcase.has_impacts(),
      'impacts_production': testcase.impacts_production(),
      'find_similar_issues_options': FIND_SIMILAR_ISSUES_OPTIONS,
      'auto_delete_timestamp': auto_delete_timestamp,
      'auto_close_timestamp': auto_close_timestamp,
      'memory_tool_display_label': memory_tool_display_label,
      'memory_tool_display_value': memory_tool_display_value,
      'last_tested': last_tested,
      'is_admin_or_not_oss_fuzz': is_admin_or_not_oss_fuzz(),
      'has_issue_tracker': has_issue_tracker,
      'reproduction_help_url': reproduction_help_url,
  }