def get_components_list(component_revisions_dict, job_type):
  """Return a prioritized order of components based on job type."""
  components = sorted(component_revisions_dict.keys())
  if utils.is_chromium():
    # Components prioritization only applies to non-chromium projects.
    return components

  project_name = data_handler.get_project_name(job_type)
  if not project_name:
    # No project name found in job environment, return list as-is.
    return components

  project_src = '/src/' + project_name
  for component in components.copy():
    if component == project_src:
      components.remove(component)
      components.insert(0, component)
      break

    if project_name.lower() in os.path.basename(component).lower():
      components.remove(component)
      components.insert(0, component)
      # Keep trying in case an exact match is found later.

  return components
def _make_space(requested_size, current_build_dir=None):
  """Try to make the requested number of bytes available by deleting builds."""
  if utils.is_chromium():
    min_free_disk_space = MIN_FREE_DISK_SPACE_CHROMIUM
  else:
    min_free_disk_space = MIN_FREE_DISK_SPACE_DEFAULT

  builds_directory = environment.get_value('BUILDS_DIR')
  error_message = 'Need at least %d GB of free disk space.' % (
      (min_free_disk_space + requested_size) / 1024**3)
  for _ in xrange(MAX_EVICTED_BUILDS):
    free_disk_space = shell.get_free_disk_space(builds_directory)
    if free_disk_space is None:
      # Can't determine free disk space, bail out.
      return False

    if requested_size + min_free_disk_space < free_disk_space:
      return True

    if not _evict_build(current_build_dir):
      logs.log_error(error_message)
      return False

  free_disk_space = shell.get_free_disk_space(builds_directory)
  result = requested_size + min_free_disk_space < free_disk_space
  if not result:
    logs.log_error(error_message)

  return result
def update_fuzz_blocker_label(policy, testcase, issue,
                              top_crashes_by_project_and_platform_map):
  """Add top crash label to issue."""
  fuzz_blocker_label = policy.label('fuzz_blocker')
  if not fuzz_blocker_label:
    return

  if not issue:
    return

  if not testcase.open:
    return

  top_crash_platforms = get_top_crash_platforms(
      testcase, top_crashes_by_project_and_platform_map)
  if not top_crash_platforms:
    # Not a top crasher, bail out.
    return

  if issue_tracker_utils.was_label_added(issue, fuzz_blocker_label):
    # Issue was already marked a top crasher, bail out.
    return

  if len(top_crash_platforms) == 1:
    platform_message = '%s platform' % top_crash_platforms[0]
  else:
    platform_message = '%s and %s platforms' % (', '.join(
        top_crash_platforms[:-1]), top_crash_platforms[-1])

  fuzzer_name = (
      testcase.get_metadata('fuzzer_binary_name') or testcase.fuzzer_name)
  update_message = (
      'This crash occurs very frequently on %s and is likely preventing the '
      'fuzzer %s from making much progress. Fixing this will allow more bugs '
      'to be found.' % (platform_message, fuzzer_name))
  if utils.is_oss_fuzz():
    update_message += OSS_FUZZ_INCORRECT_COMMENT
  elif utils.is_chromium():
    update_message += '\n\nMarking this bug as a blocker for next Beta release.'
    update_message = _append_generic_incorrect_comment(
        update_message, policy, issue,
        ' and remove the {label_text}.'.format(
            label_text=issue.issue_tracker.label_text(
                data_types.CHROMIUM_ISSUE_RELEASEBLOCK_BETA_LABEL)))
    issue.labels.add(data_types.CHROMIUM_ISSUE_RELEASEBLOCK_BETA_LABEL)

    # Update with the next beta for trunk, and remove existing milestone label.
    beta_milestone_label = (
        'M-%d' % build_info.get_release_milestone('head', testcase.platform))
    if beta_milestone_label not in issue.labels:
      issue.labels.remove_by_prefix('M-')
      issue.labels.add(beta_milestone_label)

  logs.log(update_message)
  issue.labels.add(fuzz_blocker_label)
  issue.save(new_comment=update_message, notify=True)
def get(self):
  """Handles get request."""
  email = helpers.get_user_email()
  if not email:
    raise helpers.AccessDeniedException()

  is_privileged_or_domain_user = access.has_access(
      need_privileged_access=False)
  if is_privileged_or_domain_user or _is_uploader_allowed(email):
    # Privileged, domain and upload users can see all job and fuzzer names.
    allowed_jobs = data_handler.get_all_job_type_names()
    allowed_fuzzers = data_handler.get_all_fuzzer_names_including_children(
        include_parents=True)
  else:
    # Check if this is an external user with access to certain fuzzers/jobs.
    allowed_jobs = external_users.allowed_jobs_for_user(email)
    allowed_fuzzers = external_users.allowed_fuzzers_for_user(
        email, include_from_jobs=True)

    if not allowed_fuzzers and not allowed_jobs:
      raise helpers.AccessDeniedException()

  has_issue_tracker = bool(data_handler.get_issue_tracker_name())

  result, params = get_result()
  return self.render(
      'upload.html', {
          'fieldValues': {
              'blackboxFuzzers': filter_blackbox_fuzzers(allowed_fuzzers),
              'jobs': allowed_jobs,
              'libfuzzerTargets': filter_target_names(allowed_fuzzers,
                                                      'libFuzzer'),
              'aflTargets': filter_target_names(allowed_fuzzers, 'afl'),
              'honggfuzzTargets': filter_target_names(allowed_fuzzers,
                                                      'honggfuzz'),
              'isChromium': utils.is_chromium(),
              'sandboxedJobs': data_types.INTERNAL_SANDBOXED_JOB_TYPES,
              'csrfToken': form.generate_csrf_token(),
              'isExternalUser': not is_privileged_or_domain_user,
              'uploadInfo': gcs.prepare_blob_upload()._asdict(),
              'hasIssueTracker': has_issue_tracker,
          },
          'params': params,
          'result': result
      })
def get(self):
  """Handles get request."""
  email = helpers.get_user_email()
  if not email:
    raise helpers.AccessDeniedException()

  is_privileged_or_domain_user = access.has_access(
      need_privileged_access=False)
  if is_privileged_or_domain_user or _is_uploader_allowed(email):
    # Privileged, domain and upload users can see all job and fuzzer names.
    allowed_jobs = data_handler.get_all_job_type_names()
    allowed_fuzzers = data_handler.get_all_fuzzer_names_including_children(
        include_parents=True)
  else:
    # Check if this is an external user with access to certain fuzzers/jobs.
    allowed_jobs = external_users.allowed_jobs_for_user(email)
    allowed_fuzzers = external_users.allowed_fuzzers_for_user(
        email, include_from_jobs=True)

    if not allowed_fuzzers and not allowed_jobs:
      raise helpers.AccessDeniedException()

  has_issue_tracker = bool(data_handler.get_issue_tracker_name())

  result, params = get_result(self)
  self.render(
      "upload.html",
      {
          "fieldValues": {
              "jobs": allowed_jobs,
              "libfuzzerTargets": filter_target_names(allowed_fuzzers,
                                                      "libFuzzer"),
              "aflTargets": filter_target_names(allowed_fuzzers, "afl"),
              "isChromium": utils.is_chromium(),
              "sandboxedJobs": data_types.INTERNAL_SANDBOXED_JOB_TYPES,
              "csrfToken": form.generate_csrf_token(),
              "isExternalUser": not is_privileged_or_domain_user,
              "uploadInfo": gcs.prepare_blob_upload()._asdict(),
              "hasIssueTracker": has_issue_tracker,
          },
          "params": params,
          "result": result,
      },
  )
def create_impact_task_if_needed(testcase):
  """Creates an impact task if needed."""
  # Impact doesn't make sense for non-chromium projects.
  if not utils.is_chromium():
    return

  # Impact is only applicable to chromium project, otherwise bail out.
  if testcase.project_name != 'chromium':
    return

  # We cannot run impact job for custom binaries since we don't have any
  # archived production builds for these.
  if build_manager.is_custom_binary():
    return

  tasks.add_task('impact', testcase.key.id(), testcase.job_type)
def critical_tasks_completed(testcase):
  """Check to see if all critical tasks have finished running on a test case."""
  if testcase.status == 'Unreproducible':
    # These tasks don't apply to unreproducible testcases.
    return True

  if testcase.one_time_crasher_flag:
    # These tasks don't apply to flaky testcases.
    return True

  # For non-chromium projects, impact and blame tasks are not applicable.
  if not utils.is_chromium():
    return testcase.minimized_keys and testcase.regression

  return bool(testcase.minimized_keys and testcase.regression and
              testcase.is_impact_set_flag)
def get(self):
  """Render dead bots as json (used by automated scripts)."""
  # This is a publicly exposed chromium-specific page.
  if utils.is_chromium():
    heartbeats = ndb_utils.get_all_from_model(data_types.Heartbeat)
  else:
    raise helpers.EarlyExitException('Dead bots list unavailable.', 400)

  result = {}
  alive_cutoff = _get_alive_cutoff()
  for heartbeat in heartbeats:
    if heartbeat.last_beat_time <= alive_cutoff:
      result[heartbeat.bot_name] = 'dead'

  self.render_json(result)
def get_src_map(revision):
  """Get SrcMap json."""
  if utils.is_chromium():
    return None

  revision_info_url_format = environment.get_value('REVISION_VARS_URL')
  if not revision_info_url_format:
    return None

  revision_info_url = revision_info_url_format % revision
  url_content = _get_url_content(revision_info_url)
  if not url_content:
    logs.log_error('Failed to get component revisions from %s.' %
                   revision_info_url)
    return None

  return _to_dict(url_content)
def get(self):
  """Get and render the testcase list in HTML."""
  result, params = get_result(self)
  field_values = {
      'projects': data_handler.get_all_project_names(),
      'fuzzers': data_handler.get_all_fuzzer_names_including_children(
          include_parents=True),
      'jobs': data_handler.get_all_job_type_names(),
      'shouldShowImpact': utils.is_chromium()
  }
  self.render('testcase-list.html', {
      'fieldValues': field_values,
      'result': result,
      'params': params
  })
def get_components_list(component_revisions_dict, job_type):
  """Return a prioritized order of components based on job type."""
  components = sorted(component_revisions_dict.keys())
  if utils.is_chromium():
    # Components prioritization only applies to non-chromium projects.
    return components

  project_name = data_handler.get_project_name(job_type)
  if not project_name:
    # No project name found in job environment, return list as-is.
    return components

  project_src = '/src/%s' % project_name
  for component in components:
    if component == project_src:
      components.remove(component)
      components.insert(0, component)
      break

  return components
def create_blame_task_if_needed(testcase):
  """Creates a blame task if needed."""
  # Blame doesn't work for non-chromium projects.
  if not utils.is_chromium():
    return

  # Blame is only applicable to chromium project, otherwise bail out.
  if testcase.project_name != 'chromium':
    return

  # We cannot run blame job for custom binaries since we don't have any context
  # on the crash revision and regression range.
  if build_manager.is_custom_binary():
    return

  # Don't send duplicate issues to Predator. This causes issues with metrics
  # tracking and wastes cycles.
  if testcase.status == 'Duplicate':
    return

  create_task = False
  if testcase.one_time_crasher_flag:
    # For unreproducible testcases, it is still beneficial to get component
    # information from blame task.
    create_task = True
  else:
    # Reproducible testcase.
    # Step 1: Check if the regression task finished. If not, bail out.
    if not testcase.regression:
      return

    # Step 2: Check if the symbolize task is applicable and finished. If not,
    # bail out.
    if build_manager.has_symbolized_builds() and not testcase.symbolized:
      return

    create_task = True

  if create_task:
    tasks.add_task('blame', testcase.key.id(), testcase.job_type)
def _setup_application_path(self, build_dir=None, app_path='APP_PATH',
                            build_update=False):
  """Sets up APP_PATH environment variables for revision build."""
  logs.log('Setup application path.')

  if not build_dir:
    build_dir = self.build_dir

  # Make sure to initialize so that we don't carry stale values
  # in case of errors. app_path can be APP_PATH or APP_PATH_DEBUG.
  environment.set_value(app_path, '')
  environment.set_value('APP_DIR', '')
  environment.set_value('BUILD_DIR', build_dir)
  environment.set_value('GN_ARGS_PATH', '')
  environment.set_value('LLVM_SYMBOLIZER_PATH',
                        environment.get_default_tool_path('llvm-symbolizer'))

  # Initialize variables.
  fuzzer_directory = environment.get_value('FUZZER_DIR')
  search_directories = [build_dir]
  if fuzzer_directory:
    search_directories.append(fuzzer_directory)

  set_environment_vars(search_directories, app_path=app_path)

  absolute_file_path = environment.get_value(app_path)
  app_directory = environment.get_value('APP_DIR')

  if not absolute_file_path:
    return

  # Set the symlink if needed.
  symbolic_link_target = environment.get_value('SYMBOLIC_LINK')
  if symbolic_link_target:
    os.system('mkdir --parents %s' % os.path.dirname(symbolic_link_target))
    os.system('rm %s' % symbolic_link_target)
    os.system('ln -s %s %s' % (app_directory, symbolic_link_target))

  # Android specific initialization.
  if environment.platform() == 'ANDROID':
    # Prepare device for app install.
    android.device.initialize_device()

    # On Android, we may need to write a command line file. We do this in
    # advance so that we do not have to write this to the device multiple
    # times.
    # TODO(mbarbella): Build code should not depend on fuzzing.
    from fuzzing import tests
    tests.get_command_line_for_application(write_command_line_file=True)

    # Install the app if it does not exist.
    android.device.install_application_if_needed(absolute_file_path,
                                                 build_update)
    return

  if not build_update:
    return

  # The following hacks are only applicable in Chromium.
  if not utils.is_chromium():
    return

  # Chromium specific workaround for missing ICU data file in root directory.
  # Copy it from relative folders. See crbug.com/741603.
  root_icu_data_file_path = os.path.join(app_directory, ICU_DATA_FILENAME)
  find_icu_data_file_path = utils.find_binary_path(app_directory,
                                                   ICU_DATA_FILENAME)
  if find_icu_data_file_path and not os.path.exists(root_icu_data_file_path):
    shell.copy_file(find_icu_data_file_path, root_icu_data_file_path)
def execute_task(testcase_id, job_type):
  """Attempt to find if the testcase affects release branches on Chromium."""
  # This shouldn't ever get scheduled, but check just in case.
  if not utils.is_chromium():
    return

  # Locate the testcase associated with the id.
  testcase = data_handler.get_testcase_by_id(testcase_id)

  # If this testcase is fixed, we should no longer be doing impact testing.
  if testcase.fixed and testcase.is_impact_set_flag:
    return

  # For testcases with status unreproducible, we only do impact analysis once.
  if testcase.is_status_unreproducible() and testcase.is_impact_set_flag:
    return

  # Update comments only after checking the above bailout conditions.
  data_handler.update_testcase_comment(testcase, data_types.TaskState.STARTED)

  # This task is not applicable to unreproducible testcases.
  if testcase.one_time_crasher_flag:
    data_handler.update_testcase_comment(
        testcase, data_types.TaskState.ERROR,
        'Not applicable for unreproducible testcases')
    return

  # This task is not applicable for custom binaries. We cannot remove the
  # creation of such tasks specifically for custom binary testcase in cron,
  # so exit gracefully.
  if build_manager.is_custom_binary():
    data_handler.update_testcase_comment(
        testcase, data_types.TaskState.FINISHED,
        'Not applicable for custom binaries')
    return

  # If we don't have a stable or beta build url pattern, we try to use build
  # information url to make a guess.
  if not build_manager.has_production_builds():
    if not testcase.regression:
      data_handler.update_testcase_comment(
          testcase, data_types.TaskState.FINISHED,
          'Cannot run without regression range, will re-run once regression '
          'task finishes')
      return

    impacts = get_impacts_from_url(testcase.regression, testcase.job_type)
    testcase = data_handler.get_testcase_by_id(testcase_id)
    set_testcase_with_impacts(testcase, impacts)
    data_handler.update_testcase_comment(testcase,
                                         data_types.TaskState.FINISHED)
    return

  # Setup testcase and its dependencies.
  file_list, _, testcase_file_path = setup.setup_testcase(testcase)
  if not file_list:
    return

  # Setup stable, beta builds and get impact and crash stacktrace.
  try:
    impacts = get_impacts_on_prod_builds(testcase, testcase_file_path)
  except BuildFailedException as error:
    testcase = data_handler.get_testcase_by_id(testcase_id)
    data_handler.update_testcase_comment(testcase, data_types.TaskState.ERROR,
                                         error.message)
    tasks.add_task(
        'impact',
        testcase_id,
        job_type,
        wait_time=environment.get_value('FAIL_WAIT'))
    return

  testcase = data_handler.get_testcase_by_id(testcase_id)
  set_testcase_with_impacts(testcase, impacts)

  # Set stacktrace in case we have an unreproducible crash on trunk,
  # but it crashes on one of the production builds.
  if testcase.is_status_unreproducible() and impacts.get_extra_trace():
    testcase.crash_stacktrace = data_handler.filter_stacktrace(
        '%s\n\n%s' % (data_handler.get_stacktrace(testcase),
                      impacts.get_extra_trace()))

  data_handler.update_testcase_comment(testcase, data_types.TaskState.FINISHED)
  class RedirectHandler(webapp2.RequestHandler):
    """Handler to redirect to domain."""

    def get(self, _):
      self.redirect(
          'https://' + to_domain + self.request.path_qs, permanent=True)

  return RedirectHandler


# Add item to the navigation menu. Order is important.
base_handler.add_menu('Testcases', '/testcases')
base_handler.add_menu('Fuzzer Statistics', '/fuzzer-stats')
base_handler.add_menu('Crash Statistics', '/crash-stats')
base_handler.add_menu('Upload Testcase', '/upload-testcase')

if utils.is_chromium():
  base_handler.add_menu('Crashes by range', '/commit-range')

if not utils.is_oss_fuzz():
  base_handler.add_menu('Fuzzers', '/fuzzers')
  base_handler.add_menu('Corpora', '/corpora')
  base_handler.add_menu('Bots', '/bots')
  base_handler.add_menu('Jobs', '/jobs')
  base_handler.add_menu('Configuration', '/configuration')

base_handler.add_menu('Report Bug', '/report-bug')
base_handler.add_menu('Documentation', '/docs')

# We need to separate routes for cron to avoid redirection.
_CRON_ROUTES = [
    ('/backup', backup.Handler),
from handlers.testcase_detail import delete
from handlers.testcase_detail import download_testcase
from handlers.testcase_detail import find_similar_issues
from handlers.testcase_detail import mark_fixed
from handlers.testcase_detail import mark_security
from handlers.testcase_detail import mark_unconfirmed
from handlers.testcase_detail import redo
from handlers.testcase_detail import remove_duplicate
from handlers.testcase_detail import remove_group
from handlers.testcase_detail import remove_issue
from handlers.testcase_detail import testcase_variants
from handlers.testcase_detail import update_from_trunk
from handlers.testcase_detail import update_issue
from metrics import logs

_is_chromium = utils.is_chromium()
_is_oss_fuzz = utils.is_oss_fuzz()


class _TrailingSlashRemover(webapp2.RequestHandler):

  def get(self, url):
    self.redirect(url)


def redirect_to(to_domain):
  """Create a redirect handler to a domain."""

  class RedirectHandler(webapp2.RequestHandler):
    """Handler to redirect to domain."""

    def get(self, _):
      self.redirect(
          'https://' + to_domain + self.request.path_qs, permanent=True)
def get_component_revisions_dict(revision, job_type):
  """Retrieve revision vars dict."""
  if revision == 0 or revision == '0' or revision is None:
    # Return empty dict for zero start revision.
    return {}

  config = db_config.get()
  revision_info_url_format = db_config.get_value_for_job(
      config.revision_vars_url, job_type)
  if not revision_info_url_format:
    return None

  project_name = data_handler.get_project_name(job_type)
  revisions_dict = {}

  if utils.is_chromium():
    component = data_handler.get_component_name(job_type)
    repository = data_handler.get_repository_for_component(component)
    if repository and not _is_clank(revision_info_url_format):
      revision_hash = _git_commit_position_to_git_hash_for_chromium(
          revision, repository)
      if revision_hash is None:
        return None

      # FIXME: While we check for this explicitly appended component in all
      # applicable cases that we know of within this codebase, if the dict
      # is shared with an external service (e.g. Predator) we may need to
      # clean this up beforehand.
      revisions_dict['/src'] = {
          'name': _get_component_display_name(component, project_name),
          'url': _git_url_for_chromium_repository(repository),
          'rev': revision_hash,
          'commit_pos': revision
      }

      # Use revision hash for info url later.
      revision = revision_hash

  revision_info_url = revision_info_url_format % revision
  url_content = _get_url_content(revision_info_url)
  if not url_content:
    logs.log_error('Failed to get component revisions from %s.' %
                   revision_info_url)
    return None

  # Parse as per DEPS format.
  if _is_deps(revision_info_url):
    deps_revisions_dict = deps_to_revisions_dict(url_content)
    if not deps_revisions_dict:
      return None

    revisions_dict.update(deps_revisions_dict)
    return revisions_dict

  # Parse as per Clank DEPS format.
  if _is_clank(revision_info_url):
    return _clank_revision_file_to_revisions_dict(url_content)

  # Default case: parse content as yaml.
  revisions_dict = _to_dict(url_content)
  if not revisions_dict:
    logs.log_error('Failed to parse component revisions from %s.' %
                   revision_info_url)
    return None

  # Parse as per source map format.
  if revision_info_url.endswith(SOURCE_MAP_EXTENSION):
    revisions_dict = _src_map_to_revisions_dict(revisions_dict, project_name)

  return revisions_dict