def cleanup_testcases_and_issues():
  """Clean up unneeded open testcases and their associated issues.

  Iterates over every untriaged testcase and, for each one, applies issue
  updates, testcase open/close marking rules, notifications, triage-complete
  marking and deletion rules — in that order, since later rules depend on
  state set by earlier ones.
  """
  jobs = data_handler.get_all_job_type_names()
  # Only untriaged testcases need processing; fetch keys only to keep the
  # query cheap and re-fetch each entity individually below.
  testcase_keys = ndb_utils.get_all_from_query(
      data_types.Testcase.query(
          ndb_utils.is_false(data_types.Testcase.triaged)),
      keys_only=True)
  top_crashes_by_project_and_platform_map = (
      get_top_crashes_for_all_projects_and_platforms())

  for testcase_key in testcase_keys:
    try:
      testcase = data_handler.get_testcase_by_id(testcase_key.id())
    except errors.InvalidTestcaseError:
      # Already deleted.
      continue

    issue = issue_tracker_utils.get_issue_for_testcase(testcase)
    policy = issue_tracker_utils.get_issue_tracker_policy_for_testcase(
        testcase)
    if not policy:
      # Fall back to a no-op policy so the update helpers below still run.
      policy = issue_tracker_policy.get_empty()

    # Issue updates.
    update_os_labels(policy, testcase, issue)
    update_fuzz_blocker_label(policy, testcase, issue,
                              top_crashes_by_project_and_platform_map)
    update_component_labels(testcase, issue)
    update_issue_ccs_from_owners_file(policy, testcase, issue)
    update_issue_owner_and_ccs_from_predator_results(policy, testcase, issue)
    update_issue_labels_for_flaky_testcase(policy, testcase, issue)

    # Testcase marking rules.
    mark_duplicate_testcase_as_closed_with_no_issue(testcase)
    mark_issue_as_closed_if_testcase_is_fixed(policy, testcase, issue)
    mark_testcase_as_closed_if_issue_is_closed(policy, testcase, issue)
    mark_testcase_as_closed_if_job_is_invalid(testcase, jobs)
    mark_unreproducible_testcase_as_fixed_if_issue_is_closed(testcase, issue)
    mark_unreproducible_testcase_and_issue_as_closed_after_deadline(
        policy, testcase, issue)

    # Notification, to be done at end after testcase state is updated from
    # previous rules.
    notify_closed_issue_if_testcase_is_open(policy, testcase, issue)
    notify_issue_if_testcase_is_invalid(policy, testcase, issue)
    notify_uploader_when_testcase_is_processed(policy, testcase, issue)

    # Mark testcase as triage complete if both testcase and associated issue
    # are closed. This also needs to be done before the deletion rules.
    mark_testcase_as_triaged_if_needed(testcase, issue)

    # Testcase deletion rules.
    delete_unreproducible_testcase_with_no_issue(testcase)
def mark_testcase_as_triaged_if_needed(testcase, issue):
  """Mark testcase as triage complete if both testcase and associated issue
  are closed.

  Args:
    testcase: Testcase entity; its `triaged` flag is set and persisted when
        both the testcase and any associated issue are closed.
    issue: Associated issue-tracker issue, or None if there is none.
  """
  # Check if testcase is open. If yes, bail out.
  if testcase.open:
    return

  # Check if there is an associated bug in open state. If yes, bail out.
  if issue:
    # Get latest issue object to ensure our update went through.
    issue = issue_tracker_utils.get_issue_for_testcase(testcase)
    # Bug fix: the refetch can return None (issue deleted or no longer
    # accessible); the old code dereferenced `issue.is_open` unconditionally
    # and crashed. Treat a missing issue as no longer blocking triage.
    if issue and issue.is_open:
      return

  testcase.triaged = True
  testcase.put()
def do_post(self):
  """Upload a testcase.

  Reads the upload form fields from the request, validates the job, the
  fuzzer/target combination, user privileges and all numeric fields, then
  either schedules an archive of multiple testcases for unpacking or creates
  a single user-uploaded testcase and notifies its associated issue.
  """
  testcase_id = self.request.get('testcaseId')
  uploaded_file = self.get_upload()
  if testcase_id and not uploaded_file:
    # Re-upload of an existing testcase: fetch its stored blob instead of
    # requiring a file in the request.
    testcase = helpers.get_testcase(testcase_id)
    if not access.can_user_access_testcase(testcase):
      raise helpers.AccessDeniedException()

    # Use minimized testcase for upload (if available).
    key = (testcase.minimized_keys if testcase.minimized_keys and
           testcase.minimized_keys != 'NA' else testcase.fuzzed_keys)
    uploaded_file = blobs.get_blob_info(key)

    # Extract filename part from blob (normalize Windows separators first).
    uploaded_file.filename = os.path.basename(
        uploaded_file.filename.replace('\\', os.sep))

  job_type = self.request.get('job')
  if not job_type:
    raise helpers.EarlyExitException('Missing job name.', 400)

  # Job must both look like a valid name and actually exist in the datastore.
  if (not data_types.Job.VALID_NAME_REGEX.match(job_type) or
      not data_types.Job.query(data_types.Job.name == job_type).get()):
    raise helpers.EarlyExitException('Invalid job name.', 400)

  # Infer the fuzzing engine from the job name.
  fuzzer_name = ''
  job_type_lowercase = job_type.lower()
  if 'libfuzzer' in job_type_lowercase:
    fuzzer_name = 'libFuzzer'
  elif 'afl' in job_type_lowercase:
    fuzzer_name = 'afl'

  # A target name is required for engine jobs and disallowed otherwise.
  target_name = self.request.get('target')
  if not fuzzer_name and target_name:
    raise helpers.EarlyExitException(
        'Target name is not applicable to non-engine jobs (AFL, libFuzzer).',
        400)

  if fuzzer_name and not target_name:
    raise helpers.EarlyExitException(
        'Missing target name for engine job (AFL, libFuzzer).', 400)

  if (target_name and
      not data_types.Fuzzer.VALID_NAME_REGEX.match(target_name)):
    raise helpers.EarlyExitException('Invalid target name.', 400)

  fully_qualified_fuzzer_name = ''
  if fuzzer_name and target_name:
    fully_qualified_fuzzer_name, target_name = find_fuzz_target(
        fuzzer_name, target_name, job_type)
    if not fully_qualified_fuzzer_name:
      raise helpers.EarlyExitException('Target does not exist.', 400)

  if not access.has_access(
      need_privileged_access=False,
      job_type=job_type,
      fuzzer_name=(fully_qualified_fuzzer_name or fuzzer_name)):
    raise helpers.AccessDeniedException()

  # Optional form fields.
  multiple_testcases = bool(self.request.get('multiple'))
  http_flag = bool(self.request.get('http'))
  high_end_job = bool(self.request.get('highEnd'))
  bug_information = self.request.get('issue')
  crash_revision = self.request.get('revision')
  timeout = self.request.get('timeout')
  retries = self.request.get('retries')
  bug_summary_update_flag = bool(self.request.get('updateIssue'))
  additional_arguments = self.request.get('args')
  app_launch_command = self.request.get('cmd')
  platform_id = self.request.get('platform')

  testcase_metadata = self.request.get('metadata')
  if testcase_metadata:
    try:
      testcase_metadata = json.loads(testcase_metadata)
      if not isinstance(testcase_metadata, dict):
        raise helpers.EarlyExitException('Metadata is not a JSON object.',
                                         400)
    except Exception:
      # NOTE(review): this handler also catches the EarlyExitException raised
      # just above (assuming it subclasses Exception — confirm), replacing
      # the more specific 'not a JSON object' message.
      raise helpers.EarlyExitException('Invalid metadata JSON.', 400)

  archive_state = 0
  bundled = False
  file_path_input = ''
  email = helpers.get_user_email()

  # If we have an AFL or libFuzzer target, use that for arguments.
  # Launch command looks like
  # python launcher.py {testcase_path} {target_name}
  if target_name:
    additional_arguments = '%%TESTCASE%% %s' % target_name

  # Certain modifications such as app launch command, issue updates are only
  # allowed for privileged users.
  privileged_user = access.has_access(need_privileged_access=True)
  if not privileged_user:
    if bug_information or bug_summary_update_flag:
      raise helpers.EarlyExitException(
          'You are not privileged to update existing issues.', 400)

    need_privileged_access = utils.string_is_true(
        data_handler.get_value_from_job_definition(job_type,
                                                   'PRIVILEGED_ACCESS'))
    if need_privileged_access:
      raise helpers.EarlyExitException(
          'You are not privileged to run this job type.', 400)

    if app_launch_command:
      # NOTE(review): 'arbitary' is a typo ('arbitrary') in this user-facing
      # message; left untouched here since it is runtime behavior.
      raise helpers.EarlyExitException(
          'You are not privileged to run arbitary launch commands.', 400)

    if testcase_metadata:
      raise helpers.EarlyExitException(
          'You are not privileged to set testcase metadata.', 400)

  # Invalid/missing revision silently becomes 0 (meaning "unknown").
  if crash_revision and crash_revision.isdigit():
    crash_revision = int(crash_revision)
  else:
    crash_revision = 0

  if bug_information and not bug_information.isdigit():
    raise helpers.EarlyExitException('Bug is not a number.', 400)

  # Timeout must be a positive integer no greater than 120 seconds.
  if not timeout:
    timeout = 0
  elif not timeout.isdigit() or timeout == '0':
    raise helpers.EarlyExitException(
        'Testcase timeout must be a number greater than 0.', 400)
  else:
    timeout = int(timeout)
    if timeout > 120:
      raise helpers.EarlyExitException(
          'Testcase timeout may not be greater than 120 seconds.', 400)

  # Retries must be a number no greater than MAX_RETRIES.
  if retries:
    if retries.isdigit():
      retries = int(retries)
    else:
      retries = None

    if retries is None or retries > MAX_RETRIES:
      raise helpers.EarlyExitException(
          'Testcase retries must be a number less than %d.' % MAX_RETRIES,
          400)
  else:
    retries = None

  # Gestures are a Python-literal list; any parse error falls back to [].
  # NOTE(review): bare `except:` also swallows KeyboardInterrupt/SystemExit.
  try:
    gestures = ast.literal_eval(self.request.get('gestures'))
  except:
    gestures = []
  if not gestures:
    gestures = []

  job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

  if uploaded_file is not None:
    # Strip characters that are unsafe in filenames/shell contexts.
    filename = ''.join([
        x for x in uploaded_file.filename if x not in ' ;/?:@&=+$,{}|<>()\\'
    ])
    key = str(uploaded_file.key())
    if archive.is_archive(filename):
      archive_state = data_types.ArchiveStatus.FUZZED

    if archive_state:
      if multiple_testcases:
        if testcase_metadata:
          raise helpers.EarlyExitException(
              'Testcase metadata not supported with multiple testcases.', 400)
        # Create a job to unpack an archive.
        metadata = data_types.BundledArchiveMetadata()
        metadata.blobstore_key = key
        metadata.timeout = timeout
        metadata.job_queue = job_queue
        metadata.job_type = job_type
        metadata.http_flag = http_flag
        metadata.archive_filename = filename
        metadata.uploader_email = email
        metadata.gestures = gestures
        metadata.crash_revision = crash_revision
        metadata.additional_arguments = additional_arguments
        metadata.bug_information = bug_information
        metadata.platform_id = platform_id
        metadata.app_launch_command = app_launch_command
        metadata.fuzzer_name = fuzzer_name
        metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
        metadata.fuzzer_binary_name = target_name
        metadata.put()

        tasks.add_task(
            'unpack',
            str(metadata.key.id()),
            job_type,
            queue=tasks.queue_for_job(job_type))

        # Create a testcase metadata object to show the user their upload.
        upload_metadata = data_types.TestcaseUploadMetadata()
        upload_metadata.timestamp = datetime.datetime.utcnow()
        upload_metadata.filename = filename
        upload_metadata.blobstore_key = key
        upload_metadata.original_blobstore_key = key
        upload_metadata.status = 'Pending'
        upload_metadata.bundled = True
        upload_metadata.uploader_email = email
        upload_metadata.retries = retries
        upload_metadata.bug_summary_update_flag = bug_summary_update_flag
        upload_metadata.put()

        helpers.log('Uploaded multiple testcases.', helpers.VIEW_OPERATION)
        self.render_json({'multiple': True})
        return

      # Single testcase inside an archive: detect which file to launch.
      file_path_input = guess_input_file(uploaded_file, filename)
      if not file_path_input:
        raise helpers.EarlyExitException((
            "Unable to detect which file to launch. The main file\'s name "
            'must contain either of %s.' % str(RUN_FILE_PATTERNS)), 400)
  else:
    raise helpers.EarlyExitException('Please select a file to upload.', 400)

  # Create the single testcase entry and notify any associated issue.
  testcase_id = data_handler.create_user_uploaded_testcase(
      key, key, archive_state, filename, file_path_input, timeout, job_type,
      job_queue, http_flag, gestures, additional_arguments, bug_information,
      crash_revision, email, platform_id, app_launch_command, fuzzer_name,
      fully_qualified_fuzzer_name, target_name, bundled, retries,
      bug_summary_update_flag, additional_metadata=testcase_metadata)
  testcase = data_handler.get_testcase_by_id(testcase_id)
  issue = issue_tracker_utils.get_issue_for_testcase(testcase)
  if issue:
    report_url = data_handler.TESTCASE_REPORT_URL.format(
        domain=data_handler.get_domain(), testcase_id=testcase_id)
    comment = ('ClusterFuzz is analyzing your testcase. '
               'Developers can follow the progress at %s.' % report_url)
    issue.save(new_comment=comment)

  helpers.log('Uploaded testcase %s' % testcase_id, helpers.VIEW_OPERATION)
  self.render_json({'id': '%s' % testcase_id})
def update_issue_owner_and_ccs_from_predator_results(policy,
                                                     testcase,
                                                     issue,
                                                     only_allow_ccs=False):
  """Assign the issue to an appropriate owner if possible.

  Uses the suspected-CL results Predator attached to |testcase|. With exactly
  one suspected CL (and |only_allow_ccs| unset) the CL author is assigned as
  the issue owner; with 2-3 suspected CLs their authors are cc'ed instead.

  Args:
    policy: Issue tracker policy, used to map the 'assigned' status.
    testcase: Testcase entity carrying Predator result metadata.
    issue: Associated issue tracker issue, or None.
    only_allow_ccs: If True, never assign an owner; only cc CL authors.
  """
  if not issue or not issue.is_open:
    # No issue to update, or it is already closed.
    return

  # If the issue already has an owner, we don't need to update the bug.
  if issue.assignee:
    return

  # If there are more than 3 suspected CLs, we can't be confident in the
  # results. Just skip any sort of notification to CL authors in this case.
  suspected_cls = _get_predator_result_item(testcase, 'suspected_cls')
  if not suspected_cls or len(suspected_cls) > 3:
    return

  # If we've assigned an owner or cc once before, it likely means we were
  # incorrect. Don't try again for this particular issue.
  if (issue_tracker_utils.was_label_added(
      issue, data_types.CHROMIUM_ISSUE_PREDATOR_AUTO_OWNER_LABEL) or
      issue_tracker_utils.was_label_added(
          issue, data_types.CHROMIUM_ISSUE_PREDATOR_AUTO_CC_LABEL)):
    return

  # Validate that the suspected CLs have all of the information we need before
  # continuing. This allows us to assume that they are well-formed later,
  # avoiding any potential exceptions that would interrupt this task.
  for suspected_cl in suspected_cls:
    url = suspected_cl.get('url')
    description = suspected_cl.get('description')
    author = suspected_cl.get('author')
    if not url or not description or not author:
      logs.log_error(
          'Suspected CL for testcase %d is missing required information.' %
          testcase.key.id())
      return

  if len(suspected_cls) == 1 and not only_allow_ccs:
    suspected_cl = suspected_cls[0]

    # If this owner has already been assigned before but has since been
    # removed, don't assign it to them again.
    for action in issue.actions:
      if action.assignee == suspected_cl['author']:
        return

    # We have high confidence for the single-CL case, so we assign the owner.
    issue.labels.add(data_types.CHROMIUM_ISSUE_PREDATOR_AUTO_OWNER_LABEL)
    issue.assignee = suspected_cl['author']
    issue.status = policy.status('assigned')
    issue_comment = (
        'Automatically assigning owner based on suspected regression '
        'changelist %s (%s).\n\n'
        'If this is incorrect, please let us know why and apply the %s '
        'label. If you aren\'t the correct owner for this issue, please '
        'unassign yourself as soon as possible so it can be re-triaged.' %
        (suspected_cl['url'], suspected_cl['description'],
         data_types.CHROMIUM_ISSUE_PREDATOR_WRONG_CL_LABEL))
  else:
    # Skip if we have already cc'ed authors for this testcase's results.
    if testcase.get_metadata('has_issue_ccs_from_predator_results'):
      return

    issue_comment = (
        'Automatically adding ccs based on suspected regression changelists:'
        '\n\n')
    ccs_added = False
    for suspected_cl in suspected_cls:
      # Update the comment with the suspected CL, regardless of whether or not
      # we're ccing the author. This might, for example, catch the attention
      # of someone who has already been cced.
      author = suspected_cl['author']
      issue_comment += '%s by %s - %s\n\n' % (suspected_cl['description'],
                                              author, suspected_cl['url'])
      if author in issue.ccs:
        continue

      # If an author has previously been manually removed from the cc list,
      # we assume they were incorrectly added. Don't try to add them again.
      author_was_removed = False
      for action in issue.actions:
        if author in action.ccs.removed:
          author_was_removed = True
          break

      if author_was_removed:
        continue

      issue.ccs.add(author)
      ccs_added = True

    if not ccs_added:
      # Everyone we'd expect to see has already been cced on the issue. No
      # need to spam it with another comment. Also, set the metadata to avoid
      # doing this again.
      # Bug fix: write the same key the guard above reads
      # ('has_issue_ccs_from_predator_results'). The previous key
      # ('has_issue_ccs_from_owners_file') belongs to the OWNERS-file flow,
      # so the guard never fired and this path re-ran every cycle.
      testcase.set_metadata('has_issue_ccs_from_predator_results', True)
      return

    issue.labels.add(data_types.CHROMIUM_ISSUE_PREDATOR_AUTO_CC_LABEL)
    issue_comment += (
        'If this is incorrect, please let us know why and apply the '
        '{label_text}.').format(
            label_text=issue.issue_tracker.label_text(
                data_types.CHROMIUM_ISSUE_PREDATOR_WRONG_CL_LABEL))

  try:
    issue.save(new_comment=issue_comment, notify=True)
  except HttpError:
    # If we see such an error when we aren't setting an owner, it's
    # unexpected.
    if only_allow_ccs or not issue.assignee:
      logs.log_error(
          'Unable to update issue for test case %d.' % testcase.key.id())
      return

    # Retry without setting the owner. They may not be a chromium project
    # member, in which case we can try falling back to cc.
    issue = issue_tracker_utils.get_issue_for_testcase(testcase)
    update_issue_owner_and_ccs_from_predator_results(
        policy, testcase, issue, only_allow_ccs=True)
def do_post(self):
  """Upload a testcase.

  Flask-style handler (form fields come from the module-level `request`).
  Validates job, fuzzer/target and user privileges, then either schedules a
  multi-testcase archive for unpacking or creates a single user-uploaded
  testcase. External jobs must also supply a stacktrace and revision, from
  which crash data is computed here.
  """
  email = helpers.get_user_email()
  testcase_id = request.get('testcaseId')
  uploaded_file = self.get_upload()
  if testcase_id and not uploaded_file:
    # Re-upload of an existing testcase: fetch its stored blob instead of
    # requiring a file in the request.
    testcase = helpers.get_testcase(testcase_id)
    if not access.can_user_access_testcase(testcase):
      raise helpers.AccessDeniedException()

    # Use minimized testcase for upload (if available).
    key = (testcase.minimized_keys if testcase.minimized_keys and
           testcase.minimized_keys != 'NA' else testcase.fuzzed_keys)
    uploaded_file = blobs.get_blob_info(key)

    # Extract filename part from blob (normalize Windows separators first).
    uploaded_file.filename = os.path.basename(
        uploaded_file.filename.replace('\\', os.sep))

  job_type = request.get('job')
  if not job_type:
    raise helpers.EarlyExitException('Missing job name.', 400)

  job = data_types.Job.query(data_types.Job.name == job_type).get()
  if not job:
    raise helpers.EarlyExitException('Invalid job name.', 400)

  # Engine jobs get a canonical fuzzer name inferred from the job name;
  # otherwise the user-supplied 'fuzzer' field is kept.
  fuzzer_name = request.get('fuzzer')
  job_type_lowercase = job_type.lower()
  if 'libfuzzer' in job_type_lowercase:
    fuzzer_name = 'libFuzzer'
  elif 'afl' in job_type_lowercase:
    fuzzer_name = 'afl'
  elif 'honggfuzz' in job_type_lowercase:
    fuzzer_name = 'honggfuzz'

  # A target name is required for engine jobs and disallowed otherwise.
  is_engine_job = fuzzer_name and environment.is_engine_fuzzer_job(job_type)
  target_name = request.get('target')
  if not is_engine_job and target_name:
    raise helpers.EarlyExitException(
        'Target name is not applicable to non-engine jobs (AFL, libFuzzer).',
        400)

  if is_engine_job and not target_name:
    raise helpers.EarlyExitException(
        'Missing target name for engine job (AFL, libFuzzer).', 400)

  if (target_name and
      not data_types.Fuzzer.VALID_NAME_REGEX.match(target_name)):
    raise helpers.EarlyExitException('Invalid target name.', 400)

  fully_qualified_fuzzer_name = ''
  if is_engine_job and target_name:
    if job.is_external():
      # External jobs don't run and set FuzzTarget entities as part of
      # fuzz_task. Set it here instead.
      fuzz_target = (
          data_handler.record_fuzz_target(fuzzer_name, target_name, job_type))
      fully_qualified_fuzzer_name = fuzz_target.fully_qualified_name()
      target_name = fuzz_target.binary
    else:
      fully_qualified_fuzzer_name, target_name = find_fuzz_target(
          fuzzer_name, target_name, job_type)

  # Allowed either through normal ACLs or through the uploader allow-list.
  if (not access.has_access(
      need_privileged_access=False,
      job_type=job_type,
      fuzzer_name=(fully_qualified_fuzzer_name or fuzzer_name)) and
      not _is_uploader_allowed(email)):
    raise helpers.AccessDeniedException()

  # Optional form fields.
  multiple_testcases = bool(request.get('multiple'))
  http_flag = bool(request.get('http'))
  high_end_job = bool(request.get('highEnd'))
  bug_information = request.get('issue')
  crash_revision = request.get('revision')
  timeout = request.get('timeout')
  retries = request.get('retries')
  bug_summary_update_flag = bool(request.get('updateIssue'))
  quiet_flag = bool(request.get('quiet'))
  additional_arguments = request.get('args')
  app_launch_command = request.get('cmd')
  platform_id = request.get('platform')
  issue_labels = request.get('issue_labels')
  gestures = request.get('gestures') or '[]'
  stacktrace = request.get('stacktrace')

  crash_data = None
  if job.is_external():
    # External jobs are not reproduced by ClusterFuzz, so crash details must
    # accompany the upload.
    if not stacktrace:
      raise helpers.EarlyExitException(
          'Stacktrace required for external jobs.', 400)

    if not crash_revision:
      raise helpers.EarlyExitException(
          'Revision required for external jobs.', 400)

    crash_data = stack_analyzer.get_crash_data(
        stacktrace,
        fuzz_target=target_name,
        symbolize_flag=False,
        already_symbolized=True,
        detect_ooms_and_hangs=True)
  elif stacktrace:
    raise helpers.EarlyExitException(
        'Should not specify stacktrace for non-external jobs.', 400)

  testcase_metadata = request.get('metadata', {})
  if testcase_metadata:
    try:
      testcase_metadata = json.loads(testcase_metadata)
    except Exception as e:
      raise helpers.EarlyExitException('Invalid metadata JSON.', 400) from e
    if not isinstance(testcase_metadata, dict):
      raise helpers.EarlyExitException('Metadata is not a JSON object.', 400)
  if issue_labels:
    testcase_metadata['issue_labels'] = issue_labels

  # Gestures are a Python-literal list string.
  try:
    gestures = ast.literal_eval(gestures)
  except Exception as e:
    raise helpers.EarlyExitException('Failed to parse gestures.', 400) from e

  archive_state = 0
  bundled = False
  file_path_input = ''

  # Certain modifications such as app launch command, issue updates are only
  # allowed for privileged users.
  privileged_user = access.has_access(need_privileged_access=True)
  if not privileged_user:
    if bug_information or bug_summary_update_flag:
      raise helpers.EarlyExitException(
          'You are not privileged to update existing issues.', 400)

    need_privileged_access = utils.string_is_true(
        data_handler.get_value_from_job_definition(job_type,
                                                   'PRIVILEGED_ACCESS'))
    if need_privileged_access:
      raise helpers.EarlyExitException(
          'You are not privileged to run this job type.', 400)

    if app_launch_command:
      raise helpers.EarlyExitException(
          'You are not privileged to run arbitrary launch commands.', 400)

    if (testcase_metadata and
        not _allow_unprivileged_metadata(testcase_metadata)):
      raise helpers.EarlyExitException(
          'You are not privileged to set testcase metadata.', 400)

    if additional_arguments:
      raise helpers.EarlyExitException(
          'You are not privileged to add command-line arguments.', 400)

    if gestures:
      raise helpers.EarlyExitException(
          'You are not privileged to run arbitrary gestures.', 400)

  # TODO(aarya): Remove once AFL is migrated to engine pipeline.
  if target_name:
    additional_arguments = '%TESTCASE%'

  # Invalid/missing revision silently becomes 0 (meaning "unknown").
  if crash_revision and crash_revision.isdigit():
    crash_revision = int(crash_revision)
  else:
    crash_revision = 0

  if bug_information == '0':  # Auto-recover from this bad input.
    bug_information = None
  if bug_information and not bug_information.isdigit():
    raise helpers.EarlyExitException('Bug is not a number.', 400)

  # Timeout must be a positive integer no greater than 120 seconds.
  if not timeout:
    timeout = 0
  elif not timeout.isdigit() or timeout == '0':
    raise helpers.EarlyExitException(
        'Testcase timeout must be a number greater than 0.', 400)
  else:
    timeout = int(timeout)
    if timeout > 120:
      raise helpers.EarlyExitException(
          'Testcase timeout may not be greater than 120 seconds.', 400)

  # Retries must be a number no greater than MAX_RETRIES.
  if retries:
    if retries.isdigit():
      retries = int(retries)
    else:
      retries = None

    if retries is None or retries > MAX_RETRIES:
      raise helpers.EarlyExitException(
          'Testcase retries must be a number less than %d.' % MAX_RETRIES,
          400)
  else:
    retries = None

  job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

  if uploaded_file is not None:
    # Strip characters that are unsafe in filenames/shell contexts.
    filename = ''.join([
        x for x in uploaded_file.filename if x not in ' ;/?:@&=+$,{}|<>()\\'
    ])
    key = str(uploaded_file.key())
    if archive.is_archive(filename):
      archive_state = data_types.ArchiveStatus.FUZZED

    if archive_state:
      if multiple_testcases:
        # Create a job to unpack an archive.
        metadata = data_types.BundledArchiveMetadata()
        metadata.blobstore_key = key
        metadata.timeout = timeout
        metadata.job_queue = job_queue
        metadata.job_type = job_type
        metadata.http_flag = http_flag
        metadata.archive_filename = filename
        metadata.uploader_email = email
        metadata.gestures = gestures
        metadata.crash_revision = crash_revision
        metadata.additional_arguments = additional_arguments
        metadata.bug_information = bug_information
        metadata.platform_id = platform_id
        metadata.app_launch_command = app_launch_command
        metadata.fuzzer_name = fuzzer_name
        metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
        metadata.fuzzer_binary_name = target_name
        metadata.put()

        tasks.add_task(
            'unpack',
            str(metadata.key.id()),
            job_type,
            queue=tasks.queue_for_job(job_type))

        # Create a testcase metadata object to show the user their upload.
        upload_metadata = data_types.TestcaseUploadMetadata()
        upload_metadata.timestamp = datetime.datetime.utcnow()
        upload_metadata.filename = filename
        upload_metadata.blobstore_key = key
        upload_metadata.original_blobstore_key = key
        upload_metadata.status = 'Pending'
        upload_metadata.bundled = True
        upload_metadata.uploader_email = email
        upload_metadata.retries = retries
        upload_metadata.bug_summary_update_flag = bug_summary_update_flag
        upload_metadata.quiet_flag = quiet_flag
        upload_metadata.additional_metadata_string = json.dumps(
            testcase_metadata)
        upload_metadata.bug_information = bug_information
        upload_metadata.put()

        helpers.log('Uploaded multiple testcases.', helpers.VIEW_OPERATION)
        return self.render_json({'multiple': True})

      # Single testcase inside an archive: detect which file to launch.
      file_path_input = guess_input_file(uploaded_file, filename)
      if not file_path_input:
        raise helpers.EarlyExitException((
            "Unable to detect which file to launch. The main file\'s name "
            'must contain either of %s.' % str(RUN_FILE_PATTERNS)), 400)
  else:
    raise helpers.EarlyExitException('Please select a file to upload.', 400)

  # Note: the full Job entity (not just its name) is passed here, along with
  # any pre-computed crash data for external jobs.
  testcase_id = data_handler.create_user_uploaded_testcase(
      key, key, archive_state, filename, file_path_input, timeout, job,
      job_queue, http_flag, gestures, additional_arguments, bug_information,
      crash_revision, email, platform_id, app_launch_command, fuzzer_name,
      fully_qualified_fuzzer_name, target_name, bundled, retries,
      bug_summary_update_flag, quiet_flag,
      additional_metadata=testcase_metadata, crash_data=crash_data)

  if not quiet_flag:
    testcase = data_handler.get_testcase_by_id(testcase_id)
    issue = issue_tracker_utils.get_issue_for_testcase(testcase)
    if issue:
      report_url = data_handler.TESTCASE_REPORT_URL.format(
          domain=data_handler.get_domain(), testcase_id=testcase_id)

      comment = ('ClusterFuzz is analyzing your testcase. '
                 'Developers can follow the progress at %s.' % report_url)
      issue.save(new_comment=comment)

  helpers.log('Uploaded testcase %s' % testcase_id, helpers.VIEW_OPERATION)
  return self.render_json({'id': '%s' % testcase_id})
def do_post(self):
  """Upload a testcase.

  webapp-style handler (form fields come from `self.request`). Validates job,
  fuzzer/target and user privileges, then either schedules a multi-testcase
  archive for unpacking or creates a single user-uploaded testcase and
  notifies its associated issue (unless the quiet flag is set).
  """
  email = helpers.get_user_email()
  testcase_id = self.request.get("testcaseId")
  uploaded_file = self.get_upload()
  if testcase_id and not uploaded_file:
    # Re-upload of an existing testcase: fetch its stored blob instead of
    # requiring a file in the request.
    testcase = helpers.get_testcase(testcase_id)
    if not access.can_user_access_testcase(testcase):
      raise helpers.AccessDeniedException()

    # Use minimized testcase for upload (if available).
    key = (testcase.minimized_keys if testcase.minimized_keys and
           testcase.minimized_keys != "NA" else testcase.fuzzed_keys)
    uploaded_file = blobs.get_blob_info(key)

    # Extract filename part from blob (normalize Windows separators first).
    uploaded_file.filename = os.path.basename(
        uploaded_file.filename.replace("\\", os.sep))

  job_type = self.request.get("job")
  if not job_type:
    raise helpers.EarlyExitException("Missing job name.", 400)

  # Job must both look like a valid name and actually exist in the datastore.
  if (not data_types.Job.VALID_NAME_REGEX.match(job_type) or
      not data_types.Job.query(data_types.Job.name == job_type).get()):
    raise helpers.EarlyExitException("Invalid job name.", 400)

  # Infer the fuzzing engine from the job name.
  fuzzer_name = ""
  job_type_lowercase = job_type.lower()
  if "libfuzzer" in job_type_lowercase:
    fuzzer_name = "libFuzzer"
  elif "afl" in job_type_lowercase:
    fuzzer_name = "afl"

  # A target name is required for engine jobs and disallowed otherwise.
  target_name = self.request.get("target")
  if not fuzzer_name and target_name:
    raise helpers.EarlyExitException(
        "Target name is not applicable to non-engine jobs (AFL, libFuzzer).",
        400,
    )

  if fuzzer_name and not target_name:
    raise helpers.EarlyExitException(
        "Missing target name for engine job (AFL, libFuzzer).", 400)

  if target_name and not data_types.Fuzzer.VALID_NAME_REGEX.match(
      target_name):
    raise helpers.EarlyExitException("Invalid target name.", 400)

  fully_qualified_fuzzer_name = ""
  if fuzzer_name and target_name:
    fully_qualified_fuzzer_name, target_name = find_fuzz_target(
        fuzzer_name, target_name, job_type)
    if not fully_qualified_fuzzer_name:
      raise helpers.EarlyExitException("Target does not exist.", 400)

  # Allowed either through normal ACLs or through the uploader allow-list.
  if not access.has_access(
      need_privileged_access=False,
      job_type=job_type,
      fuzzer_name=(fully_qualified_fuzzer_name or fuzzer_name),
  ) and not _is_uploader_allowed(email):
    raise helpers.AccessDeniedException()

  # Optional form fields.
  multiple_testcases = bool(self.request.get("multiple"))
  http_flag = bool(self.request.get("http"))
  high_end_job = bool(self.request.get("highEnd"))
  bug_information = self.request.get("issue")
  crash_revision = self.request.get("revision")
  timeout = self.request.get("timeout")
  retries = self.request.get("retries")
  bug_summary_update_flag = bool(self.request.get("updateIssue"))
  quiet_flag = bool(self.request.get("quiet"))
  additional_arguments = self.request.get("args")
  app_launch_command = self.request.get("cmd")
  platform_id = self.request.get("platform")
  issue_labels = self.request.get("issue_labels")
  gestures = self.request.get("gestures") or "[]"

  testcase_metadata = self.request.get("metadata", {})
  if testcase_metadata:
    try:
      testcase_metadata = json.loads(testcase_metadata)
    except Exception:
      raise helpers.EarlyExitException("Invalid metadata JSON.", 400)
    if not isinstance(testcase_metadata, dict):
      raise helpers.EarlyExitException("Metadata is not a JSON object.", 400)
  if issue_labels:
    testcase_metadata["issue_labels"] = issue_labels

  # Gestures are a Python-literal list string.
  try:
    gestures = ast.literal_eval(gestures)
  except Exception:
    raise helpers.EarlyExitException("Failed to parse gestures.", 400)

  archive_state = 0
  bundled = False
  file_path_input = ""

  # Certain modifications such as app launch command, issue updates are only
  # allowed for privileged users.
  privileged_user = access.has_access(need_privileged_access=True)
  if not privileged_user:
    if bug_information or bug_summary_update_flag:
      raise helpers.EarlyExitException(
          "You are not privileged to update existing issues.", 400)

    need_privileged_access = utils.string_is_true(
        data_handler.get_value_from_job_definition(job_type,
                                                   "PRIVILEGED_ACCESS"))
    if need_privileged_access:
      raise helpers.EarlyExitException(
          "You are not privileged to run this job type.", 400)

    if app_launch_command:
      raise helpers.EarlyExitException(
          "You are not privileged to run arbitrary launch commands.", 400)

    if testcase_metadata and not _allow_unprivileged_metadata(
        testcase_metadata):
      raise helpers.EarlyExitException(
          "You are not privileged to set testcase metadata.", 400)

    if additional_arguments:
      raise helpers.EarlyExitException(
          "You are not privileged to add command-line arguments.", 400)

    if gestures:
      raise helpers.EarlyExitException(
          "You are not privileged to run arbitrary gestures.", 400)

  # TODO(aarya): Remove once AFL is migrated to engine pipeline.
  if target_name:
    additional_arguments = "%TESTCASE%"

  # Invalid/missing revision silently becomes 0 (meaning "unknown").
  if crash_revision and crash_revision.isdigit():
    crash_revision = int(crash_revision)
  else:
    crash_revision = 0

  if bug_information == "0":  # Auto-recover from this bad input.
    bug_information = None
  if bug_information and not bug_information.isdigit():
    raise helpers.EarlyExitException("Bug is not a number.", 400)

  # Timeout must be a positive integer no greater than 120 seconds.
  if not timeout:
    timeout = 0
  elif not timeout.isdigit() or timeout == "0":
    raise helpers.EarlyExitException(
        "Testcase timeout must be a number greater than 0.", 400)
  else:
    timeout = int(timeout)
    if timeout > 120:
      raise helpers.EarlyExitException(
          "Testcase timeout may not be greater than 120 seconds.", 400)

  # Retries must be a number no greater than MAX_RETRIES.
  if retries:
    if retries.isdigit():
      retries = int(retries)
    else:
      retries = None

    if retries is None or retries > MAX_RETRIES:
      raise helpers.EarlyExitException(
          "Testcase retries must be a number less than %d." % MAX_RETRIES,
          400)
  else:
    retries = None

  job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

  if uploaded_file is not None:
    # Strip characters that are unsafe in filenames/shell contexts.
    filename = "".join(x for x in uploaded_file.filename
                       if x not in " ;/?:@&=+$,{}|<>()\\")
    key = str(uploaded_file.key())
    if archive.is_archive(filename):
      archive_state = data_types.ArchiveStatus.FUZZED

    if archive_state:
      if multiple_testcases:
        # Create a job to unpack an archive.
        metadata = data_types.BundledArchiveMetadata()
        metadata.blobstore_key = key
        metadata.timeout = timeout
        metadata.job_queue = job_queue
        metadata.job_type = job_type
        metadata.http_flag = http_flag
        metadata.archive_filename = filename
        metadata.uploader_email = email
        metadata.gestures = gestures
        metadata.crash_revision = crash_revision
        metadata.additional_arguments = additional_arguments
        metadata.bug_information = bug_information
        metadata.platform_id = platform_id
        metadata.app_launch_command = app_launch_command
        metadata.fuzzer_name = fuzzer_name
        metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
        metadata.fuzzer_binary_name = target_name
        metadata.put()

        tasks.add_task(
            "unpack",
            str(metadata.key.id()),
            job_type,
            queue=tasks.queue_for_job(job_type),
        )

        # Create a testcase metadata object to show the user their upload.
        upload_metadata = data_types.TestcaseUploadMetadata()
        upload_metadata.timestamp = datetime.datetime.utcnow()
        upload_metadata.filename = filename
        upload_metadata.blobstore_key = key
        upload_metadata.original_blobstore_key = key
        upload_metadata.status = "Pending"
        upload_metadata.bundled = True
        upload_metadata.uploader_email = email
        upload_metadata.retries = retries
        upload_metadata.bug_summary_update_flag = bug_summary_update_flag
        upload_metadata.quiet_flag = quiet_flag
        upload_metadata.additional_metadata_string = json.dumps(
            testcase_metadata)
        upload_metadata.put()

        helpers.log("Uploaded multiple testcases.", helpers.VIEW_OPERATION)
        self.render_json({"multiple": True})
        return

      # Single testcase inside an archive: detect which file to launch.
      file_path_input = guess_input_file(uploaded_file, filename)
      if not file_path_input:
        raise helpers.EarlyExitException(
            ("Unable to detect which file to launch. The main file's name "
             "must contain either of %s." % str(RUN_FILE_PATTERNS)),
            400,
        )
  else:
    raise helpers.EarlyExitException("Please select a file to upload.", 400)

  # Create the single testcase entry and notify any associated issue.
  testcase_id = data_handler.create_user_uploaded_testcase(
      key,
      key,
      archive_state,
      filename,
      file_path_input,
      timeout,
      job_type,
      job_queue,
      http_flag,
      gestures,
      additional_arguments,
      bug_information,
      crash_revision,
      email,
      platform_id,
      app_launch_command,
      fuzzer_name,
      fully_qualified_fuzzer_name,
      target_name,
      bundled,
      retries,
      bug_summary_update_flag,
      quiet_flag,
      additional_metadata=testcase_metadata,
  )

  if not quiet_flag:
    testcase = data_handler.get_testcase_by_id(testcase_id)
    issue = issue_tracker_utils.get_issue_for_testcase(testcase)
    if issue:
      report_url = data_handler.TESTCASE_REPORT_URL.format(
          domain=data_handler.get_domain(), testcase_id=testcase_id)

      comment = ("ClusterFuzz is analyzing your testcase. "
                 "Developers can follow the progress at %s." % report_url)
      issue.save(new_comment=comment)

  helpers.log("Uploaded testcase %s" % testcase_id, helpers.VIEW_OPERATION)
  self.render_json({"id": "%s" % testcase_id})