def get(self):
  """GET handler.

  Serves the OSS-Fuzz homepage on OSS-Fuzz deployments; every other
  deployment is sent to the testcase list instead.
  """
  if utils.is_oss_fuzz():
    self.render('oss-fuzz-home.html', get_results())
  else:
    self.redirect('/testcases')
def render(self, path, values=None, status=200):
  """Write HTML response.

  Args:
    path: Template path, resolved via the module's Jinja environment.
    values: Optional dict of template variables; common menu/auth values
        are merged in before rendering.
    status: HTTP status code to set on the response.
  """
  if values is None:
    values = {}

  # Values shared by every page: navigation menu and deployment flags.
  values['menu_items'] = _MENU_ITEMS
  values['is_oss_fuzz'] = utils.is_oss_fuzz()
  values['is_development'] = (
      environment.is_running_on_app_engine_development())
  values['is_logged_in'] = bool(helpers.get_user_email())

  # Only track analytics for non-admin users.
  values['ga_tracking_id'] = (
      local_config.GAEConfig().get('ga_tracking_id')
      if not auth.is_current_user_admin() else None)
  if values['is_logged_in']:
    # Login/logout URLs round-trip back to the current page.
    values['switch_account_url'] = make_login_url(self.request.url)
    values['logout_url'] = make_logout_url(dest_url=self.request.url)

  template = _JINJA_ENVIRONMENT.get_template(path)

  # Headers must be set before the body is written.
  self._add_security_response_headers()
  self.response.headers['Content-Type'] = 'text/html'
  self.response.out.write(template.render(values))
  self.response.set_status(status)
def update_issue_ccs_from_owners_file(policy, testcase, issue):
  """Add cc to an issue based on owners list from owners file. This is
  currently applicable to fuzz targets only.

  Args:
    policy: Issue tracker policy object providing label names.
    testcase: Testcase entity carrying the 'issue_owners' metadata.
    issue: Issue tracker issue associated with the testcase (may be None).
  """
  auto_cc_label = policy.label("auto_cc_from_owners")
  if not auto_cc_label:
    # Tracker policy does not define an auto-cc label; nothing to do.
    return

  if not issue or not issue.is_open:
    return

  if testcase.get_metadata("has_issue_ccs_from_owners_file"):
    # Already processed this testcase once; don't repeat.
    return

  ccs_list = utils.parse_delimited(
      testcase.get_metadata("issue_owners", ""),
      delimiter=",",
      strip=True,
      remove_empty=True,
  )
  if not ccs_list:
    return

  # If we've assigned the ccs before, it likely means we were incorrect.
  # Don't try again for this particular issue.
  if issue_tracker_utils.was_label_added(issue, auto_cc_label):
    return

  ccs_added = False
  # Snapshot the action history once; it is scanned for every candidate cc.
  actions = list(issue.actions)
  # Sample to cap the number of new ccs at AUTO_CC_LIMIT.
  for cc in random.sample(ccs_list, min(AUTO_CC_LIMIT, len(ccs_list))):
    if cc in issue.ccs:
      continue

    # If cc was previously manually removed from the cc list, we assume that
    # they were incorrectly added. Don't try to add them again.
    cc_was_removed = any(cc in action.ccs.removed for action in actions)
    if cc_was_removed:
      continue

    issue.ccs.add(cc)
    ccs_added = True

  if not ccs_added:
    # Everyone we'd expect to see has already been cced on the issue. No need
    # to spam it with another comment. Also, set the metadata to avoid doing
    # this again.
    testcase.set_metadata("has_issue_ccs_from_owners_file", True)
    return

  issue_comment = (
      "Automatically adding ccs based on OWNERS file / target commit history.")
  if utils.is_oss_fuzz():
    issue_comment += OSS_FUZZ_INCORRECT_COMMENT + "."
  else:
    issue_comment = _append_generic_incorrect_comment(issue_comment, policy,
                                                      issue, ".")

  issue.labels.add(auto_cc_label)
  issue.save(new_comment=issue_comment, notify=True)
def update_fuzz_blocker_label(policy, testcase, issue,
                              top_crashes_by_project_and_platform_map):
  """Add top crash label to issue.

  Args:
    policy: Issue tracker policy object providing label names.
    testcase: Testcase entity being evaluated.
    issue: Associated issue tracker issue (may be None).
    top_crashes_by_project_and_platform_map: Precomputed top-crash data,
        consumed by get_top_crash_platforms.
  """
  fuzz_blocker_label = policy.label('fuzz_blocker')
  if not fuzz_blocker_label:
    # Tracker policy does not define a fuzz-blocker label; nothing to do.
    return

  if not issue:
    return

  if not testcase.open:
    return

  top_crash_platforms = get_top_crash_platforms(
      testcase, top_crashes_by_project_and_platform_map)
  if not top_crash_platforms:
    # Not a top crasher, bail out.
    return

  if issue_tracker_utils.was_label_added(issue, fuzz_blocker_label):
    # Issue was already marked a top crasher, bail out.
    return

  # Build a human-readable platform list, e.g. "linux platform" or
  # "linux, mac and windows platforms".
  if len(top_crash_platforms) == 1:
    platform_message = '%s platform' % top_crash_platforms[0]
  else:
    platform_message = '%s and %s platforms' % (', '.join(
        top_crash_platforms[:-1]), top_crash_platforms[-1])

  fuzzer_name = (
      testcase.get_metadata('fuzzer_binary_name') or testcase.fuzzer_name)
  update_message = (
      'This crash occurs very frequently on %s and is likely preventing the '
      'fuzzer %s from making much progress. Fixing this will allow more bugs '
      'to be found.' % (platform_message, fuzzer_name))
  if utils.is_oss_fuzz():
    update_message += OSS_FUZZ_INCORRECT_COMMENT
  elif utils.is_chromium():
    # Chromium additionally gets a release-block label and milestone update.
    update_message += '\n\nMarking this bug as a blocker for next Beta release.'
    update_message = _append_generic_incorrect_comment(
        update_message, policy, issue,
        ' and remove the {label_text}.'.format(
            label_text=issue.issue_tracker.label_text(
                data_types.CHROMIUM_ISSUE_RELEASEBLOCK_BETA_LABEL)))
    issue.labels.add(data_types.CHROMIUM_ISSUE_RELEASEBLOCK_BETA_LABEL)

    # Update with the next beta for trunk, and remove existing milestone label.
    beta_milestone_label = (
        'M-%d' % build_info.get_release_milestone('head', testcase.platform))
    if beta_milestone_label not in issue.labels:
      issue.labels.remove_by_prefix('M-')
      issue.labels.add(beta_milestone_label)

  logs.log(update_message)
  issue.labels.add(fuzz_blocker_label)
  issue.save(new_comment=update_message, notify=True)
def get(self, resource=None):
  """Handle a get request with resource.

  Serves a testcase blob (minimized or fuzzed) after validating that the
  requested blob actually belongs to the testcase and that the caller is
  allowed to see it.

  Args:
    resource: Optional blob key; if absent it is derived from the testcase.

  Raises:
    helpers.EarlyExitException: On missing/invalid testcase or file.
    helpers.AccessDeniedException: If the user may not access the blob.
  """
  testcase = None
  testcase_id = request.args.get('testcase_id')
  if not testcase_id and not resource:
    raise helpers.EarlyExitException('No file requested.', 400)

  if testcase_id:
    try:
      testcase = data_handler.get_testcase_by_id(testcase_id)
    except errors.InvalidTestcaseError:
      raise helpers.EarlyExitException('Invalid testcase.', 400)

    if not resource:
      # Prefer the minimized testcase; 'NA' marks a missing minimized key.
      if testcase.minimized_keys and testcase.minimized_keys != 'NA':
        resource = testcase.minimized_keys
      else:
        resource = testcase.fuzzed_keys

  fuzzer_binary_name = None
  if testcase:
    fuzzer_binary_name = testcase.get_metadata('fuzzer_binary_name')

  resource = str(urllib.parse.unquote(resource))
  blob_info = blobs.get_blob_info(resource)
  if not blob_info:
    raise helpers.EarlyExitException('File does not exist.', 400)

  # The blob must be one of the testcase's own keys; otherwise a valid
  # testcase id could be used to fetch an arbitrary blob.
  if (testcase and testcase.fuzzed_keys != blob_info.key() and
      testcase.minimized_keys != blob_info.key()):
    raise helpers.EarlyExitException('Invalid testcase.', 400)

  if (utils.is_oss_fuzz() and testcase and
      self.check_public_testcase(blob_info, testcase)):
    # Public OSS-Fuzz testcase.
    return self._send_blob(
        blob_info,
        testcase.key.id(),
        is_minimized=True,
        fuzzer_binary_name=fuzzer_binary_name)

  is_minimized = testcase and blob_info.key() == testcase.minimized_keys
  if access.has_access():
    # User has general access.
    return self._send_blob(blob_info, testcase_id, is_minimized,
                           fuzzer_binary_name)

  # If this blobstore file is for a testcase, check if the user has access to
  # the testcase.
  if not testcase:
    raise helpers.AccessDeniedException()

  if access.can_user_access_testcase(testcase):
    return self._send_blob(blob_info, testcase_id, is_minimized,
                           fuzzer_binary_name)

  raise helpers.AccessDeniedException()
def _allow_unprivileged_metadata(testcase_metadata):
  """Returns whether or not the provided testcase metadata can be set by an
  unprivileged user."""
  # Labels in OSS-Fuzz are privileged and control things like disclosure
  # deadlines. Do not let these be editable.
  if utils.is_oss_fuzz():
    return False

  # Unprivileged users may set *only* issue labels — i.e. the metadata must
  # consist of exactly the 'issue_labels' key and nothing else.
  return set(testcase_metadata) == {'issue_labels'}
def get(self):
  """Handle a get request.

  Runs a cluster update for every project, using the OSS-Fuzz-specific
  manager on OSS-Fuzz deployments.
  """
  manager_class = (
      OssFuzzClustersManager if utils.is_oss_fuzz() else ClustersManager)
  for project_id in _get_project_ids():
    manager_class(project_id).update_clusters()
def update_issue_ccs_from_owners_file(testcase, issue):
  """Add cc to an issue based on owners list from owners file. This is
  currently applicable to fuzz targets only.

  Args:
    testcase: Testcase entity carrying the 'issue_owners' metadata.
    issue: Associated issue tracker issue (may be None).
  """
  if not issue or not issue.open:
    return

  # If we've assigned the ccs before, it likely means we were incorrect.
  # Don't try again for this particular issue.
  if issue.has_comment_with_label(data_types.ISSUE_CLUSTERFUZZ_AUTO_CC_LABEL):
    return

  if testcase.get_metadata('has_issue_ccs_from_owners_file'):
    # Already processed this testcase once; don't repeat.
    return

  ccs_list = utils.parse_delimited(
      testcase.get_metadata('issue_owners', ''),
      delimiter=',',
      strip=True,
      remove_empty=True)
  if not ccs_list:
    return

  ccs_added = False
  # Fetch the comment history once; it is scanned for every candidate cc.
  comments = issue.get_comments()
  # Sample to cap the number of new ccs at AUTO_CC_LIMIT.
  for cc in random.sample(ccs_list, min(AUTO_CC_LIMIT, len(ccs_list))):
    if issue.has_cc(cc):
      continue

    # If cc was previously manually removed from the cc list, we assume that
    # they were incorrectly added. Don't try to add them again.
    # NOTE: '-<email>' is the tracker's removed-cc marker in comment.cc.
    cc_was_removed = any(('-%s' % cc) in comment.cc for comment in comments)
    if cc_was_removed:
      continue

    issue.add_cc(cc)
    ccs_added = True

  if not ccs_added:
    # Everyone we'd expect to see has already been cced on the issue. No need
    # to spam it with another comment. Also, set the metadata to avoid doing
    # this again.
    testcase.set_metadata('has_issue_ccs_from_owners_file', True)
    return

  issue.comment = (
      'Automatically adding ccs based on OWNERS file / target commit history.')
  if utils.is_oss_fuzz():
    issue.comment += OSS_FUZZ_INCORRECT_COMMENT
  else:
    issue.comment += INTERNAL_INCORRECT_COMMENT
  issue.comment += '.'

  issue.add_label(data_types.ISSUE_CLUSTERFUZZ_AUTO_CC_LABEL)
  issue.save(send_email=True)
def get(self):
  """Render the bot list HTML."""
  # OSS-Fuzz tracks host workers separately; other deployments read every
  # Heartbeat entity from the datastore.
  heartbeats = (
      _get_host_workers_heartbeats() if utils.is_oss_fuzz() else
      ndb_utils.get_all_from_model(data_types.Heartbeat))
  self.render('bots.html', {'bots': _convert_heartbeats_to_dicts(heartbeats)})
def update_fuzz_blocker_label(testcase, issue,
                              top_crashes_by_project_and_platform_map):
  """Add top crash label to issue.

  Args:
    testcase: Testcase entity being evaluated.
    issue: Associated issue tracker issue (may be None).
    top_crashes_by_project_and_platform_map: Precomputed top-crash data,
        consumed by get_top_crash_platforms.
  """
  if not issue:
    return

  if not testcase.open:
    return

  top_crash_platforms = get_top_crash_platforms(
      testcase, top_crashes_by_project_and_platform_map)
  if not top_crash_platforms:
    # Not a top crasher, bail out.
    return

  if issue.has_comment_with_label(data_types.ISSUE_FUZZ_BLOCKER_LABEL):
    # Issue was already marked a top crasher, bail out.
    return

  # Build a human-readable platform list, e.g. "linux platform" or
  # "linux, mac and windows platforms".
  if len(top_crash_platforms) == 1:
    platform_message = '%s platform' % top_crash_platforms[0]
  else:
    platform_message = '%s and %s platforms' % (', '.join(
        top_crash_platforms[:-1]), top_crash_platforms[-1])

  fuzzer_name = (
      testcase.get_metadata('fuzzer_binary_name') or testcase.fuzzer_name)
  update_message = (
      'This crash occurs very frequently on %s and is likely preventing the '
      'fuzzer %s from making much progress. Fixing this will allow more bugs '
      'to be found.' % (platform_message, fuzzer_name))
  if utils.is_oss_fuzz():
    update_message += OSS_FUZZ_INCORRECT_COMMENT
  else:
    # Internal (Chromium) flow: mark as a release blocker as well.
    update_message += '\n\nMarking this bug as a blocker for next Beta release.'
    update_message += INTERNAL_INCORRECT_COMMENT
    update_message += (' and remove the %s label.' %
                       data_types.ISSUE_RELEASEBLOCK_BETA_LABEL)
    issue.add_label(data_types.ISSUE_RELEASEBLOCK_BETA_LABEL)

    # Update with the next beta for trunk, and remove existing milestone label.
    beta_milestone_label = (
        'M-%d' % build_info.get_release_milestone('head', testcase.platform))
    if not issue.has_label(beta_milestone_label):
      issue.remove_label_by_prefix('M-')
      issue.add_label(beta_milestone_label)

  logs.log(update_message)
  issue.add_label(data_types.ISSUE_FUZZ_BLOCKER_LABEL)
  issue.comment = update_message
  issue.save(send_email=True)
def _unpack_build(base_build_dir, build_dir, build_url, target_weights=None):
  """Unpacks a build from a build url into the build directory.

  Args:
    base_build_dir: Base builds directory, used for disk-space accounting.
    build_dir: Directory to unpack the build into (cleared first).
    build_url: URL of the build archive to download.
    target_weights: Optional weights for random fuzz target selection.

  Returns:
    True on success, False on failure (errors are logged).
  """
  # Track time taken to unpack builds so that it doesn't silently regress.
  start_time = time.time()

  # Free up memory.
  utils.python_gc()

  # Remove the current build.
  logs.log('Removing build directory %s.' % build_dir)
  if not shell.remove_directory(build_dir, recreate=True):
    logs.log_error('Unable to clear build directory %s.' % build_dir)
    _handle_unrecoverable_error_on_windows()
    return False

  # Decide whether to use cache build archives or not.
  use_cache = environment.get_value('CACHE_STORE', False)

  # Download build archive locally.
  build_local_archive = os.path.join(build_dir, os.path.basename(build_url))

  # Make the disk space necessary for the archive available.
  archive_size = storage.get_download_file_size(
      build_url, build_local_archive, use_cache=True)
  if archive_size is not None and not _make_space(archive_size, base_build_dir):
    shell.clear_data_directories()
    logs.log_fatal_and_exit(
        'Failed to make space for download. '
        'Cleared all data directories to free up space, exiting.')

  logs.log('Downloading build from url %s.' % build_url)
  try:
    storage.copy_file_from(build_url, build_local_archive, use_cache=use_cache)
  except Exception:
    # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt are no
    # longer swallowed and misreported as a download failure.
    logs.log_error('Unable to download build url %s.' % build_url)
    return False

  unpack_everything = environment.get_value('UNPACK_ALL_FUZZ_TARGETS_AND_FILES')
  if not unpack_everything:
    # For fuzzing, pick a random fuzz target so that we only un-archive that
    # particular fuzz target and its dependencies and save disk space.
    # If we are going to unpack everything in archive based on
    # |UNPACK_ALL_FUZZ_TARGETS_AND_FILES| in the job definition, then don't
    # set a random fuzz target before we've unpacked the build. It won't
    # actually save us anything in this case and can be really expensive for
    # large builds (such as Chrome OS). Defer setting it until after the build
    # has been unpacked.
    _set_random_fuzz_target_for_fuzzing_if_needed(
        _get_fuzz_targets_from_archive(build_local_archive), target_weights)

  # Actual list of files to unpack can be smaller if we are only unarchiving
  # a particular fuzz target.
  file_match_callback = _get_file_match_callback()
  assert not (unpack_everything and file_match_callback is not None)

  if not _make_space_for_build(build_local_archive, base_build_dir,
                               file_match_callback):
    shell.clear_data_directories()
    logs.log_fatal_and_exit(
        'Failed to make space for build. '
        'Cleared all data directories to free up space, exiting.')

  # Unpack the local build archive.
  logs.log('Unpacking build archive %s.' % build_local_archive)
  # OSS-Fuzz archives come from third parties, so treat them as untrusted.
  trusted = not utils.is_oss_fuzz()
  try:
    archive.unpack(
        build_local_archive,
        build_dir,
        trusted=trusted,
        file_match_callback=file_match_callback)
  except Exception:
    # Was a bare `except:`; narrowed for the same reason as the download path.
    logs.log_error('Unable to unpack build archive %s.' % build_local_archive)
    return False

  if unpack_everything:
    # Set a random fuzz target now that the build has been unpacked, if we
    # didn't set one earlier.
    _set_random_fuzz_target_for_fuzzing_if_needed(
        _get_fuzz_targets_from_dir(build_dir), target_weights)

  # If this is partial build due to selected build files, then mark it as such
  # so that it is not re-used.
  if file_match_callback:
    partial_build_file_path = os.path.join(build_dir, PARTIAL_BUILD_FILE)
    utils.write_data_to_file('', partial_build_file_path)

  # No point in keeping the archive around.
  shell.remove_file(build_local_archive)

  end_time = time.time()
  elapsed_time = end_time - start_time
  # Warn when unpacking exceeds the time limit so regressions are visible.
  log_func = logs.log_warn if elapsed_time > UNPACK_TIME_LIMIT else logs.log
  log_func('Build took %0.02f minutes to unpack.' % (elapsed_time / 60.))

  return True
      self.redirect(
          'https://' + to_domain + self.request.path_qs, permanent=True)

  return RedirectHandler


# Add item to the navigation menu. Order is important.
base_handler.add_menu('Testcases', '/testcases')
base_handler.add_menu('Fuzzer Statistics', '/fuzzer-stats')
base_handler.add_menu('Crash Statistics', '/crash-stats')
base_handler.add_menu('Upload Testcase', '/upload-testcase')

if utils.is_chromium():
  base_handler.add_menu('Crashes by range', '/commit-range')

if not utils.is_oss_fuzz():
  # These pages are hidden on OSS-Fuzz deployments.
  base_handler.add_menu('Fuzzers', '/fuzzers')
  base_handler.add_menu('Corpora', '/corpora')
  base_handler.add_menu('Bots', '/bots')
  base_handler.add_menu('Jobs', '/jobs')
  base_handler.add_menu('Configuration', '/configuration')

base_handler.add_menu('Report Bug', '/report-bug')
base_handler.add_menu('Documentation', '/docs')

# We need to separate routes for cron to avoid redirection.
_CRON_ROUTES = [
    ('/backup', backup.Handler),
    ('/build-crash-stats', build_crash_stats.Handler),
    ('/cleanup', cleanup.Handler),
    ('/corpus-backup/make-public', corpus_backup.MakePublicHandler),
def mark_unreproducible_testcase_and_issue_as_closed_after_deadline(
    testcase, issue):
  """Closes an unreproducible testcase and its associated issue after a
  certain time period.

  Args:
    testcase: Testcase entity to evaluate and possibly close.
    issue: Associated issue tracker issue (may be None).
  """
  # If the testcase is already closed, no more work to do.
  if not testcase.open:
    return

  # Check testcase status, so as to skip unreproducible uploads.
  if testcase.status not in ['Processed', 'Duplicate']:
    return

  # Make sure that this testcase is an unreproducible bug. If not, bail out.
  if not testcase.one_time_crasher_flag:
    return

  # Make sure that this testcase has an associated bug. If not, bail out.
  if not testcase.bug_information:
    return

  # If this testcase was manually uploaded, don't change issue state as our
  # reproduction result might be incorrect.
  if testcase.uploader_email:
    return

  # Make sure that there is an associated bug and it is in open state.
  if not issue or not issue.open:
    return

  # Check if there are any reproducible open testcases associated with
  # this bug. If yes, return.
  similar_testcase = data_types.Testcase.query(
      data_types.Testcase.bug_information == testcase.bug_information,
      ndb_utils.is_true(data_types.Testcase.open),
      ndb_utils.is_false(data_types.Testcase.one_time_crasher_flag)).get()
  if similar_testcase:
    return

  # Make sure that testcase is at least older than
  # |UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE|, otherwise it will be seen in
  # crash stats anyway.
  if (testcase.timestamp and not dates.time_has_expired(
      testcase.timestamp,
      days=data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE)):
    return

  # Handle testcase that turned from reproducible to unreproducible. Account
  # for the recent progression task run time.
  last_tested_crash_time = testcase.get_metadata('last_tested_crash_time')
  if (last_tested_crash_time and not dates.time_has_expired(
      last_tested_crash_time,
      days=data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE)):
    return

  # Make sure that there is no crash seen in the deadline period.
  if get_crash_occurrence_platforms(
      testcase, data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE):
    return

  # As a last check, do the expensive call of actually checking all issue
  # comments to make sure we didn't get called out on issue mistriage.
  if issue.has_comment_with_label(data_types.ISSUE_MISTRIAGED_LABEL):
    return

  # Close associated issue and testcase.
  comment = ('ClusterFuzz testcase %d is flaky and no longer crashes, '
             'so closing issue.' % testcase.key.id())
  if utils.is_oss_fuzz():
    comment += OSS_FUZZ_INCORRECT_COMMENT
  else:
    comment += INTERNAL_INCORRECT_COMMENT
  comment += ' and re-open the issue.'

  issue.comment = comment
  issue.status = 'WontFix'
  issue.open = False
  issue.save(send_email=True)

  testcase.fixed = 'NA'
  testcase.open = False
  testcase.put()

  logs.log('Closed unreproducible testcase %d and associated issue.' %
           testcase.key.id())
def mark_issue_as_closed_if_testcase_is_fixed(testcase, issue):
  """Mark an issue as fixed if all of its associated reproducible testcase
  are fixed.

  Args:
    testcase: Testcase entity to evaluate.
    issue: Associated issue tracker issue (may be None).
  """
  # If there is no associated issue, then bail out.
  if not issue or not testcase.bug_information:
    return

  # If the issue is closed in a status other than Fixed, like Duplicate,
  # WontFix or Archived, we shouldn't change it. Bail out.
  if not issue.open and issue.status != 'Fixed':
    return

  # Check testcase status, so as to skip unreproducible uploads.
  if testcase.status not in ['Processed', 'Duplicate']:
    return

  # If the testcase is still open, no work needs to be done. Bail out.
  if testcase.open:
    return

  # FIXME: Find a better solution to skip over reproducible tests that are now
  # showing up a flaky (esp when we are unable to reproduce crash in original
  # crash revision).
  if testcase.fixed == 'NA':
    return

  # We can only verify fixed issues for reproducible testcases. If the
  # testcase is unreproducible, bail out. Exception is if we explicitly marked
  # this as fixed.
  if testcase.one_time_crasher_flag and testcase.fixed != 'Yes':
    return

  # Make sure that no other testcases associated with this issue are open.
  similar_testcase = data_types.Testcase.query(
      data_types.Testcase.bug_information == testcase.bug_information,
      ndb_utils.is_true(data_types.Testcase.open),
      ndb_utils.is_false(data_types.Testcase.one_time_crasher_flag)).get()
  if similar_testcase:
    return

  # As a last check, do the expensive call of actually checking all issue
  # comments to make sure we didn't do the verification already and we didn't
  # get called out on issue mistriage.
  if (issue.has_comment_with_label(data_types.ISSUE_VERIFIED_LABEL) or
      issue.has_comment_with_label(data_types.ISSUE_MISTRIAGED_LABEL)):
    return

  issue.add_label(data_types.ISSUE_VERIFIED_LABEL)
  comment = ('ClusterFuzz testcase %d is verified as fixed, '
             'so closing issue as verified.' % testcase.key.id())
  if utils.is_oss_fuzz():
    comment += OSS_FUZZ_INCORRECT_COMMENT
  else:
    comment += INTERNAL_INCORRECT_COMMENT
  comment += ' and re-open the issue.'

  issue.comment = comment
  issue.status = 'Verified'
  issue.open = False
  issue.save(send_email=True)
  logs.log('Closed issue %d for fixed testcase %d.' %
           (issue.id, testcase.key.id()))
def is_admin_or_not_oss_fuzz():
  """Return True if the current user is an admin or if this is not OSS-Fuzz."""
  # Non-OSS-Fuzz deployments pass unconditionally; OSS-Fuzz requires admin.
  if utils.is_oss_fuzz():
    return auth.is_current_user_admin()
  return True
def mark_issue_as_closed_if_testcase_is_fixed(policy, testcase, issue):
  """Mark an issue as fixed if all of its associated reproducible testcase
  are fixed.

  Args:
    policy: Issue tracker policy object providing label/status names.
    testcase: Testcase entity to evaluate.
    issue: Associated issue tracker issue (may be None).
  """
  verified_label = policy.label('verified')
  if not verified_label:
    # Tracker policy does not define a verified label; nothing to do.
    return

  # If there is no associated issue, then bail out.
  if not issue or not testcase.bug_information:
    return

  # If the issue is closed in a status other than Fixed, like Duplicate,
  # WontFix or Archived, we shouldn't change it. Bail out.
  if not issue.is_open and issue.status != policy.status('fixed'):
    return

  # Check testcase status, so as to skip unreproducible uploads.
  if testcase.status not in ['Processed', 'Duplicate']:
    return

  # If the testcase is still open, no work needs to be done. Bail out.
  if testcase.open:
    return

  # FIXME: Find a better solution to skip over reproducible tests that are now
  # showing up a flaky (esp when we are unable to reproduce crash in original
  # crash revision).
  if testcase.fixed == 'NA':
    return

  # We can only verify fixed issues for reproducible testcases. If the
  # testcase is unreproducible, bail out. Exception is if we explicitly marked
  # this as fixed.
  if testcase.one_time_crasher_flag and testcase.fixed != 'Yes':
    return

  # Make sure that no other testcases associated with this issue are open.
  similar_testcase = data_types.Testcase.query(
      data_types.Testcase.bug_information == testcase.bug_information,
      ndb_utils.is_true(data_types.Testcase.open),
      ndb_utils.is_false(data_types.Testcase.one_time_crasher_flag)).get()
  if similar_testcase:
    return

  # As a last check, do the expensive call of actually checking all issue
  # comments to make sure we didn't do the verification already and we didn't
  # get called out on issue mistriage.
  if (issue_tracker_utils.was_label_added(issue, verified_label) or
      issue_tracker_utils.was_label_added(issue, policy.label('wrong'))):
    return

  issue.labels.add(verified_label)

  comment = 'ClusterFuzz testcase %d is verified as fixed' % testcase.key.id()

  fixed_range_url = data_handler.get_fixed_range_url(testcase)
  if fixed_range_url:
    comment += ' in ' + fixed_range_url
  else:
    comment += '.'

  if utils.is_oss_fuzz():
    comment += OSS_FUZZ_INCORRECT_COMMENT
  else:
    comment = _append_generic_incorrect_comment(comment, policy, issue,
                                                ' and re-open the issue.')

  # Jobs may opt out of auto-closing; in that case only the comment/label are
  # added and the status is left alone.
  skip_auto_close = data_handler.get_value_from_job_definition(
      testcase.job_type, 'SKIP_AUTO_CLOSE_ISSUE')
  if not skip_auto_close:
    issue.status = policy.status('verified')

  issue.save(new_comment=comment, notify=True)
  logs.log('Mark issue %d as verified for fixed testcase %d.' %
           (issue.id, testcase.key.id()))
def do_libfuzzer_minimization(testcase, testcase_file_path):
  """Use libFuzzer's built-in minimizer where appropriate.

  Args:
    testcase: Testcase entity to minimize.
    testcase_file_path: Path to the reproducer file on disk.
  """
  is_overriden_job = bool(environment.get_value('ORIGINAL_JOB_NAME'))

  def handle_unreproducible():
    # Be more lenient with marking testcases as unreproducible when this is a
    # job override.
    if is_overriden_job:
      _skip_minimization(testcase, 'Unreproducible on overridden job.')
    else:
      task_creation.mark_unreproducible_if_flaky(testcase, True)

  timeout = environment.get_value('LIBFUZZER_MINIMIZATION_TIMEOUT', 180)
  rounds = environment.get_value('LIBFUZZER_MINIMIZATION_ROUNDS', 10)
  current_testcase_path = testcase_file_path
  last_crash_result = None

  # Get initial crash state.
  initial_crash_result = _run_libfuzzer_testcase(testcase, testcase_file_path)
  if not initial_crash_result.is_crash():
    logs.log_warn('Did not crash. Output:\n' +
                  initial_crash_result.get_stacktrace(symbolized=True))
    handle_unreproducible()
    return

  # The reproduced crash must match the testcase's security classification.
  if testcase.security_flag != initial_crash_result.is_security_issue():
    logs.log_warn('Security flag does not match.')
    handle_unreproducible()
    return

  task_creation.mark_unreproducible_if_flaky(testcase, False)

  expected_state = initial_crash_result.get_symbolized_data()
  logs.log('Initial crash state: %s\n' % expected_state.crash_state)

  # We attempt minimization multiple times in case one round results in an
  # incorrect state, or runs into another issue such as a slow unit.
  for round_number in range(1, rounds + 1):
    logs.log('Minimizing round %d.' % round_number)
    output_file_path, crash_result = _run_libfuzzer_tool(
        'minimize',
        testcase,
        current_testcase_path,
        timeout,
        expected_state.crash_state,
        set_dedup_flags=True)
    if output_file_path:
      # Round succeeded; continue minimizing from the smaller reproducer.
      last_crash_result = crash_result
      current_testcase_path = output_file_path

  if not last_crash_result:
    # No round ever produced output; record the failure and bail out.
    repro_command = tests.get_command_line_for_application(
        file_to_run=testcase_file_path, needs_http=testcase.http_flag)
    _skip_minimization(
        testcase,
        'LibFuzzer minimization failed.',
        crash_result=initial_crash_result,
        command=repro_command)
    return

  logs.log('LibFuzzer minimization succeeded.')

  if utils.is_oss_fuzz():
    # Scrub the testcase of non-essential data.
    cleansed_testcase_path = do_libfuzzer_cleanse(
        testcase, current_testcase_path, expected_state.crash_state)
    if cleansed_testcase_path:
      current_testcase_path = cleansed_testcase_path

  # Finalize the test case if we were able to reproduce it.
  repro_command = tests.get_command_line_for_application(
      file_to_run=current_testcase_path, needs_http=testcase.http_flag)
  finalize_testcase(testcase.key.id(), repro_command, last_crash_result)

  # Clean up after we're done.
  shell.clear_testcase_directories()
from handlers.testcase_detail import download_testcase
from handlers.testcase_detail import find_similar_issues
from handlers.testcase_detail import mark_fixed
from handlers.testcase_detail import mark_security
from handlers.testcase_detail import mark_unconfirmed
from handlers.testcase_detail import redo
from handlers.testcase_detail import remove_duplicate
from handlers.testcase_detail import remove_group
from handlers.testcase_detail import remove_issue
from handlers.testcase_detail import testcase_variants
from handlers.testcase_detail import update_from_trunk
from handlers.testcase_detail import update_issue
from metrics import logs

# Deployment flags, evaluated once at module load.
_is_chromium = utils.is_chromium()
_is_oss_fuzz = utils.is_oss_fuzz()


class _TrailingSlashRemover(webapp2.RequestHandler):
  """Redirects requests with a trailing slash to the slash-less URL."""

  def get(self, url):
    self.redirect(url)


def redirect_to(to_domain):
  """Create a redirect handler to a domain."""

  class RedirectHandler(webapp2.RequestHandler):
    """Handler to redirect to domain."""

    def get(self, _):
      # Permanent (301) redirect, preserving the original path and query.
      self.redirect(
          'https://' + to_domain + self.request.path_qs, permanent=True)
def wrapper(self):
  """Wrapper.

  On OSS-Fuzz deployments the wrapped handler additionally requires admin
  access; elsewhere it is invoked directly.
  """
  if not utils.is_oss_fuzz():
    return func(self)
  return check_admin_access(func)(self)