def create_variant_tasks_if_needed(testcase):
  """Fan out variant tasks for a testcase across other applicable job types."""
  if testcase.duplicate_of:
    # A duplicate shares its parameters with the original testcase, so the
    # variants computed for the original already cover this one.
    return

  testcase_id = testcase.key.id()
  project = data_handler.get_project_name(testcase.job_type)
  for candidate_job in data_types.Job.query(data_types.Job.project == project):
    candidate_job_type = candidate_job.name

    # A variant is only meaningful on a job type other than the testcase's own.
    if candidate_job_type == testcase.job_type:
      continue

    # Engine fuzzer testcases cannot be reproduced on blackbox fuzzer jobs,
    # and vice versa; both sides must be the same kind of job.
    if (environment.is_engine_fuzzer_job(testcase.job_type) !=
        environment.is_engine_fuzzer_job(candidate_job_type)):
      continue

    # Experimental jobs are excluded from variant analysis.
    if utils.string_is_true(
        candidate_job.get_environment().get('EXPERIMENTAL')):
      continue

    queue = tasks.queue_for_platform(candidate_job.platform)
    tasks.add_task('variant', testcase_id, candidate_job_type, queue)

    variant = data_handler.get_testcase_variant(testcase_id,
                                                candidate_job_type)
    variant.status = data_types.TestcaseVariantStatus.PENDING
    variant.put()
def update_fuzzer_jobs(fuzzer_entities, job_names):
  """Update fuzzer job mappings.

  Removes managed jobs that are no longer in |job_names| from every fuzzer in
  |fuzzer_entities|, refreshes each fuzzer's job mappings, and finally deletes
  the stale Job entities themselves.

  Args:
    fuzzer_entities: Iterable of ndb keys of fuzzer entities to update.
    job_names: Collection of job names that should be kept.
  """
  # The set of stale jobs does not depend on the fuzzer, so scan the jobs
  # once up front instead of re-querying the datastore (and re-logging) for
  # every fuzzer entity.
  to_delete = {}
  for job in data_types.Job.query():
    if not job.environment_string:
      continue

    job_environment = job.get_environment()
    # Only jobs explicitly marked as managed may be garbage-collected here.
    if not utils.string_is_true(job_environment.get('MANAGED', 'False')):
      continue

    if job.name in job_names:
      continue

    logs.log('Deleting job %s' % job.name)
    to_delete[job.name] = job.key

  for fuzzer_entity_key in fuzzer_entities:
    fuzzer_entity = fuzzer_entity_key.get()

    for job_name in to_delete:
      try:
        fuzzer_entity.jobs.remove(job_name)
      except ValueError:
        # This fuzzer was not mapped to the stale job; nothing to remove.
        pass

    fuzzer_entity.put()
    fuzzer_selection.update_mappings_for_fuzzer(fuzzer_entity)

  if to_delete:
    ndb_utils.delete_multi(to_delete.values())
def _get_project_results_for_jobs(jobs):
  """Return projects for jobs."""
  projects_by_name = {}
  for job in sorted(jobs, key=lambda j: j.name):
    job_environment = job.get_environment()
    project_name = job_environment.get('PROJECT_NAME', job.name)
    project = projects_by_name.setdefault(project_name, {
        'name': project_name,
        'jobs': []
    })

    # The job that prunes the corpus is the one whose coverage is tracked.
    if utils.string_is_true(job_environment.get('CORPUS_PRUNE')):
      project['coverage_job'] = job.name

    engine_display_name, engine_name = _get_engine_names(job.name)
    project['jobs'].append({
        'engine_display_name': engine_display_name,
        'engine_name': engine_name,
        'sanitizer_string': environment.get_memory_tool_display_string(
            job.name),
        'name': job.name,
        'single_target': get_single_fuzz_target_or_none(project_name,
                                                        engine_name),
        'has_stats': True
    })

  results = sorted(projects_by_name.values(), key=_sort_by_name)
  for project in results:
    project['jobs'].sort(key=_sort_by_name)

  return results
def get_tasks_to_schedule():
  """Return (task_target, job_name, queue_name) arguments to schedule a task."""
  for job in data_types.Job.query():
    # Only jobs that opted in to corpus pruning get tasks scheduled here.
    if not utils.string_is_true(job.get_environment().get('CORPUS_PRUNE')):
      continue

    queue_name = tasks.queue_for_job(job.name)
    for target_job in fuzz_target_utils.get_fuzz_target_jobs(job=job.name):
      yield (target_job.fuzz_target_name, job.name, queue_name)
def _get_excluded_jobs():
  """Return list of jobs excluded from bug filing."""
  # Experimental jobs are skipped by bug filing.
  return [
      job.name
      for job in ndb_utils.get_all_from_model(data_types.Job)
      if utils.string_is_true(job.get_environment().get('EXPERIMENTAL'))
  ]
def get(self):
  """Handle a GET request.

  Makes the latest corpus backup public for every non-experimental job that
  has corpus pruning enabled.
  """
  jobs = ndb_utils.get_all_from_model(data_types.Job)
  default_backup_bucket = utils.default_backup_bucket()
  for job in jobs:
    job_environment = job.get_environment()
    if utils.string_is_true(job_environment.get('EXPERIMENTAL')):
      # Don't use corpus backups from experimental jobs. Skip.
      continue

    if not utils.string_is_true(job_environment.get('CORPUS_PRUNE')):
      # There won't be any corpus backups for these jobs. Skip.
      continue

    corpus_backup_bucket_name = job_environment.get('BACKUP_BUCKET',
                                                    default_backup_bucket)
    if not corpus_backup_bucket_name:
      # No backup bucket found. Skip.
      continue

    corpus_fuzzer_name_override = job_environment.get(
        'CORPUS_FUZZER_NAME_OVERRIDE')

    target_jobs = list(fuzz_target_utils.get_fuzz_target_jobs(job=job.name))
    fuzz_targets = fuzz_target_utils.get_fuzz_targets_for_target_jobs(
        target_jobs)

    for target in fuzz_targets:
      if not target:
        # This is expected if any fuzzer/job combinations become outdated.
        continue

      try:
        _make_corpus_backup_public(target, corpus_fuzzer_name_override,
                                   corpus_backup_bucket_name)
      # Best effort: log and continue so one failure does not block remaining
      # targets. Narrowed from a bare except, which would also swallow
      # SystemExit/KeyboardInterrupt.
      except Exception:
        logs.log_error('Failed to make %s corpus backup public.' % target)
def _get_revision_vars_url_format(job_type, platform_id=None):
  """Return REVISION_VARS_URL from job environment if available. Otherwise,
  default to one set in project.yaml. For custom binary jobs, this is not
  applicable."""
  if job_type is None:
    # Force it to use env attribute in project.yaml.
    return local_config.ProjectConfig().get('env.REVISION_VARS_URL')

  # Custom binary jobs have no revision vars URL.
  custom_binary = data_handler.get_value_from_job_definition(
      job_type, 'CUSTOM_BINARY')
  if utils.string_is_true(custom_binary):
    return None

  rev_path = data_handler.get_value_from_job_definition_or_environment(
      job_type, 'REVISION_VARS_URL')
  return overrides.check_and_apply_overrides(
      rev_path, overrides.PLATFORM_ID_TO_REV_PATH_KEY, platform_id=platform_id)
def _filing_enabled(testcase):
  """Check if the project YAML file requires to file a GitHub issue."""
  flag_value = data_handler.get_value_from_job_definition(
      testcase.job_type, 'FILE_GITHUB_ISSUE', default='False')
  return utils.string_is_true(flag_value)
def do_post(self):
  """Upload a testcase."""
  email = helpers.get_user_email()
  testcase_id = request.get('testcaseId')
  uploaded_file = self.get_upload()
  if testcase_id and not uploaded_file:
    # Re-upload path: reuse the stored blob from an existing testcase
    # instead of a fresh file upload.
    testcase = helpers.get_testcase(testcase_id)
    if not access.can_user_access_testcase(testcase):
      raise helpers.AccessDeniedException()

    # Use minimized testcase for upload (if available).
    key = (
        testcase.minimized_keys if testcase.minimized_keys and
        testcase.minimized_keys != 'NA' else testcase.fuzzed_keys)

    uploaded_file = blobs.get_blob_info(key)

    # Extract filename part from blob.
    uploaded_file.filename = os.path.basename(
        uploaded_file.filename.replace('\\', os.sep))

  job_type = request.get('job')
  if not job_type:
    raise helpers.EarlyExitException('Missing job name.', 400)

  job = data_types.Job.query(data_types.Job.name == job_type).get()
  if not job:
    raise helpers.EarlyExitException('Invalid job name.', 400)

  fuzzer_name = request.get('fuzzer')
  job_type_lowercase = job_type.lower()

  # If the job name embeds a known engine name, that engine overrides the
  # user-supplied fuzzer.
  for engine in fuzzing.ENGINES:
    if engine.lower() in job_type_lowercase:
      fuzzer_name = engine

  is_engine_job = fuzzer_name and environment.is_engine_fuzzer_job(job_type)
  target_name = request.get('target')
  # A target name is required for engine jobs and forbidden otherwise.
  if not is_engine_job and target_name:
    raise helpers.EarlyExitException(
        'Target name is not applicable to non-engine jobs (AFL, libFuzzer).',
        400)

  if is_engine_job and not target_name:
    raise helpers.EarlyExitException(
        'Missing target name for engine job (AFL, libFuzzer).', 400)

  if (target_name and
      not data_types.Fuzzer.VALID_NAME_REGEX.match(target_name)):
    raise helpers.EarlyExitException('Invalid target name.', 400)

  fully_qualified_fuzzer_name = ''
  if is_engine_job and target_name:
    if job.is_external():
      # External jobs don't run and set FuzzTarget entities as part of
      # fuzz_task. Set it here instead.
      fuzz_target = (
          data_handler.record_fuzz_target(fuzzer_name, target_name, job_type))
      fully_qualified_fuzzer_name = fuzz_target.fully_qualified_name()
      target_name = fuzz_target.binary
    else:
      fully_qualified_fuzzer_name, target_name = find_fuzz_target(
          fuzzer_name, target_name, job_type)

  # Allow upload if the user has access to the job/fuzzer, or is on the
  # uploader allowlist.
  if (not access.has_access(
      need_privileged_access=False,
      job_type=job_type,
      fuzzer_name=(fully_qualified_fuzzer_name or fuzzer_name)) and
      not _is_uploader_allowed(email)):
    raise helpers.AccessDeniedException()

  multiple_testcases = bool(request.get('multiple'))
  http_flag = bool(request.get('http'))
  high_end_job = bool(request.get('highEnd'))
  bug_information = request.get('issue')
  crash_revision = request.get('revision')
  timeout = request.get('timeout')
  retries = request.get('retries')
  bug_summary_update_flag = bool(request.get('updateIssue'))
  quiet_flag = bool(request.get('quiet'))
  additional_arguments = request.get('args')
  app_launch_command = request.get('cmd')
  platform_id = request.get('platform')
  issue_labels = request.get('issue_labels')
  gestures = request.get('gestures') or '[]'
  stacktrace = request.get('stacktrace')

  crash_data = None
  if job.is_external():
    # External jobs are not reproduced here, so the uploader must provide the
    # crash stacktrace and revision up front.
    if not stacktrace:
      raise helpers.EarlyExitException(
          'Stacktrace required for external jobs.', 400)

    if not crash_revision:
      raise helpers.EarlyExitException(
          'Revision required for external jobs.', 400)

    crash_data = stack_analyzer.get_crash_data(
        stacktrace,
        fuzz_target=target_name,
        symbolize_flag=False,
        already_symbolized=True,
        detect_ooms_and_hangs=True)
  elif stacktrace:
    raise helpers.EarlyExitException(
        'Should not specify stacktrace for non-external jobs.', 400)

  # Metadata arrives as a JSON string; it must decode to a dict.
  testcase_metadata = request.get('metadata', {})
  if testcase_metadata:
    try:
      testcase_metadata = json.loads(testcase_metadata)
    except Exception as e:
      raise helpers.EarlyExitException('Invalid metadata JSON.', 400) from e

    if not isinstance(testcase_metadata, dict):
      raise helpers.EarlyExitException('Metadata is not a JSON object.', 400)
  if issue_labels:
    testcase_metadata['issue_labels'] = issue_labels

  # Gestures arrive as a Python-literal list string (defaults to '[]').
  try:
    gestures = ast.literal_eval(gestures)
  except Exception as e:
    raise helpers.EarlyExitException('Failed to parse gestures.', 400) from e

  archive_state = 0
  bundled = False
  file_path_input = ''

  # Certain modifications such as app launch command, issue updates are only
  # allowed for privileged users.
  privileged_user = access.has_access(need_privileged_access=True)
  if not privileged_user:
    if bug_information or bug_summary_update_flag:
      raise helpers.EarlyExitException(
          'You are not privileged to update existing issues.', 400)

    need_privileged_access = utils.string_is_true(
        data_handler.get_value_from_job_definition(job_type,
                                                   'PRIVILEGED_ACCESS'))
    if need_privileged_access:
      raise helpers.EarlyExitException(
          'You are not privileged to run this job type.', 400)

    if app_launch_command:
      raise helpers.EarlyExitException(
          'You are not privileged to run arbitrary launch commands.', 400)

    if (testcase_metadata and
        not _allow_unprivileged_metadata(testcase_metadata)):
      raise helpers.EarlyExitException(
          'You are not privileged to set testcase metadata.', 400)

    if additional_arguments:
      raise helpers.EarlyExitException(
          'You are not privileged to add command-line arguments.', 400)

    if gestures:
      raise helpers.EarlyExitException(
          'You are not privileged to run arbitrary gestures.', 400)

  # Non-numeric or missing revisions are normalized to 0.
  if crash_revision and crash_revision.isdigit():
    crash_revision = int(crash_revision)
  else:
    crash_revision = 0

  if bug_information == '0':  # Auto-recover from this bad input.
    bug_information = None
  if bug_information and not bug_information.isdigit():
    raise helpers.EarlyExitException('Bug is not a number.', 400)

  # Timeout must be a positive integer no greater than 120 seconds.
  if not timeout:
    timeout = 0
  elif not timeout.isdigit() or timeout == '0':
    raise helpers.EarlyExitException(
        'Testcase timeout must be a number greater than 0.', 400)
  else:
    timeout = int(timeout)
    if timeout > 120:
      raise helpers.EarlyExitException(
          'Testcase timeout may not be greater than 120 seconds.', 400)

  # Retries, if specified, must be a number no greater than MAX_RETRIES.
  if retries:
    if retries.isdigit():
      retries = int(retries)
    else:
      retries = None

    if retries is None or retries > MAX_RETRIES:
      raise helpers.EarlyExitException(
          'Testcase retries must be a number less than %d.' % MAX_RETRIES, 400)
  else:
    retries = None

  job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

  if uploaded_file is not None:
    # Strip characters that are unsafe in filenames.
    filename = ''.join([
        x for x in uploaded_file.filename if x not in ' ;/?:@&=+$,{}|<>()\\'
    ])
    key = str(uploaded_file.key())
    if archive.is_archive(filename):
      archive_state = data_types.ArchiveStatus.FUZZED
    if archive_state:
      if multiple_testcases:
        # Create a job to unpack an archive.
        metadata = data_types.BundledArchiveMetadata()
        metadata.blobstore_key = key
        metadata.timeout = timeout
        metadata.job_queue = job_queue
        metadata.job_type = job_type
        metadata.http_flag = http_flag
        metadata.archive_filename = filename
        metadata.uploader_email = email
        metadata.gestures = gestures
        metadata.crash_revision = crash_revision
        metadata.additional_arguments = additional_arguments
        metadata.bug_information = bug_information
        metadata.platform_id = platform_id
        metadata.app_launch_command = app_launch_command
        metadata.fuzzer_name = fuzzer_name
        metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
        metadata.fuzzer_binary_name = target_name
        metadata.put()

        tasks.add_task(
            'unpack',
            str(metadata.key.id()),
            job_type,
            queue=tasks.queue_for_job(job_type))

        # Create a testcase metadata object to show the user their upload.
        upload_metadata = data_types.TestcaseUploadMetadata()
        upload_metadata.timestamp = datetime.datetime.utcnow()
        upload_metadata.filename = filename
        upload_metadata.blobstore_key = key
        upload_metadata.original_blobstore_key = key
        upload_metadata.status = 'Pending'
        upload_metadata.bundled = True
        upload_metadata.uploader_email = email
        upload_metadata.retries = retries
        upload_metadata.bug_summary_update_flag = bug_summary_update_flag
        upload_metadata.quiet_flag = quiet_flag
        upload_metadata.additional_metadata_string = json.dumps(
            testcase_metadata)
        upload_metadata.bug_information = bug_information
        upload_metadata.put()

        helpers.log('Uploaded multiple testcases.', helpers.VIEW_OPERATION)
        return self.render_json({'multiple': True})

      # Single testcase inside an archive: figure out which file to launch.
      file_path_input = guess_input_file(uploaded_file, filename)
      if not file_path_input:
        raise helpers.EarlyExitException(
            ("Unable to detect which file to launch. The main file\'s name "
             'must contain either of %s.' % str(RUN_FILE_PATTERNS)), 400)
  else:
    raise helpers.EarlyExitException('Please select a file to upload.', 400)

  testcase_id = data_handler.create_user_uploaded_testcase(
      key,
      key,
      archive_state,
      filename,
      file_path_input,
      timeout,
      job,
      job_queue,
      http_flag,
      gestures,
      additional_arguments,
      bug_information,
      crash_revision,
      email,
      platform_id,
      app_launch_command,
      fuzzer_name,
      fully_qualified_fuzzer_name,
      target_name,
      bundled,
      retries,
      bug_summary_update_flag,
      quiet_flag,
      additional_metadata=testcase_metadata,
      crash_data=crash_data)

  if not quiet_flag:
    # If the testcase is tied to an existing issue, leave a progress comment.
    testcase = data_handler.get_testcase_by_id(testcase_id)
    issue = issue_tracker_utils.get_issue_for_testcase(testcase)
    if issue:
      report_url = data_handler.TESTCASE_REPORT_URL.format(
          domain=data_handler.get_domain(), testcase_id=testcase_id)

      comment = ('ClusterFuzz is analyzing your testcase. '
                 'Developers can follow the progress at %s.' % report_url)
      issue.save(new_comment=comment)

  helpers.log('Uploaded testcase %s' % testcase_id, helpers.VIEW_OPERATION)
  return self.render_json({'id': '%s' % testcase_id})