def post(self):
  """Handle a post request."""
  key = helpers.get_integer_key(request)
  job = ndb.Key(data_types.Job, key).get()
  if not job:
    raise helpers.EarlyExitException('Job not found.', 400)

  # Delete from fuzzers' jobs' list.
  for fuzzer in ndb_utils.get_all_from_model(data_types.Fuzzer):
    if job.name in fuzzer.jobs:
      fuzzer.jobs.remove(job.name)
      fuzzer.put()

  # Delete associated fuzzer-job mapping(s).
  query = data_types.FuzzerJob.query()
  query = query.filter(data_types.FuzzerJob.job == job.name)
  for mapping in ndb_utils.get_all_from_query(query):
    mapping.key.delete()

  # Delete job.
  job.key.delete()

  helpers.log('Deleted job %s' % job.name, helpers.MODIFY_OPERATION)
  return self.redirect('/jobs')

def _get_performance_report_data(fuzzer_name, job_type, logs_date):
  """Return performance report data."""
  # Current version works on a daily basis, the same way as the old version.
  if logs_date == 'latest':
    # Use yesterday's date by UTC to analyze yesterday's fuzzer runs.
    date_start = datetime.datetime.utcnow().date() - datetime.timedelta(days=1)
  else:
    try:
      date_start = datetime.datetime.strptime(logs_date, '%Y-%m-%d').date()
    except ValueError:
      logging.warning('Wrong date format passed to performance report: %s\n',
                      logs_date)
      raise helpers.EarlyExitException('Wrong date format.', 400)

  datetime_start = datetime.datetime.combine(date_start, datetime.time.min)
  datetime_end = datetime_start + datetime.timedelta(days=1)
  features = _get_performance_features(fuzzer_name, job_type, datetime_start,
                                       datetime_end)
  return features, date_start

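# Illustrative sketch (not part of the handler above): how a logs_date value
# resolves to the half-open UTC day window [datetime_start, datetime_end)
# used by _get_performance_report_data. The helper name below is hypothetical;
# it uses only the stdlib and can run standalone.
#
# import datetime
#
# def _example_day_window(logs_date='latest'):
#   if logs_date == 'latest':
#     # Yesterday's runs, by UTC date.
#     date_start = (
#         datetime.datetime.utcnow().date() - datetime.timedelta(days=1))
#   else:
#     date_start = datetime.datetime.strptime(logs_date, '%Y-%m-%d').date()
#   datetime_start = datetime.datetime.combine(date_start, datetime.time.min)
#   datetime_end = datetime_start + datetime.timedelta(days=1)
#   return datetime_start, datetime_end
#
# e.g. _example_day_window('2023-05-01') ->
#   (datetime(2023, 5, 1, 0, 0), datetime(2023, 5, 2, 0, 0))
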
def get(self):
  """GET handler."""
  job_type = request.get('job')
  revision = request.get('revision')
  revision_range = request.get('range')

  if revision:
    if not revision.isdigit():
      raise helpers.EarlyExitException('Revision is not an integer.', 400)
    start_revision = end_revision = revision
  elif revision_range:
    try:
      start_revision, end_revision = revision_range.split(':')
    except ValueError:
      raise helpers.EarlyExitException('Bad revision range.', 400)

    if not start_revision.isdigit():
      raise helpers.EarlyExitException('Start revision is not an integer.',
                                       400)
    if not end_revision.isdigit():
      raise helpers.EarlyExitException('End revision is not an integer.', 400)
  else:
    raise helpers.EarlyExitException('No revision specified.', 400)

  component_revisions_list = revisions.get_component_range_list(
      start_revision, end_revision, job_type)
  if not component_revisions_list:
    raise helpers.EarlyExitException('Failed to get component revisions.', 400)

  return self.render(
      'revisions-info.html',
      {'info': {
          'componentRevisionsList': component_revisions_list
      }})

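# Illustrative sketch (hypothetical helper, not part of the handler above):
# validating a "start:end" revision range the same way the GET handler does,
# returning the two revision strings unchanged on success.
#
# def _example_parse_revision_range(revision_range):
#   try:
#     start_revision, end_revision = revision_range.split(':')
#   except ValueError:
#     raise ValueError('Bad revision range.')
#   if not start_revision.isdigit() or not end_revision.isdigit():
#     raise ValueError('Start and end revisions must be integers.')
#   return start_revision, end_revision
#
# e.g. _example_parse_revision_range('1000:1010') -> ('1000', '1010')
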
def get(self):
  self.response.headers['Content-Type'] = 'application/json'
  raise helpers.EarlyExitException('message', 500, [])

def do_post(self):
  """Upload a testcase."""
  testcase_id = self.request.get('testcaseId')
  uploaded_file = self.get_upload()
  if testcase_id and not uploaded_file:
    testcase = helpers.get_testcase(testcase_id)
    if not access.can_user_access_testcase(testcase):
      raise helpers.AccessDeniedException()

    # Use minimized testcase for upload (if available).
    key = (testcase.minimized_keys if testcase.minimized_keys and
           testcase.minimized_keys != 'NA' else testcase.fuzzed_keys)

    uploaded_file = blobs.get_blob_info(key)

    # Extract filename part from blob.
    uploaded_file.filename = os.path.basename(
        uploaded_file.filename.replace('\\', os.sep))

  job_type = self.request.get('job')
  if not job_type:
    raise helpers.EarlyExitException('Missing job name.', 400)

  if (not data_types.Job.VALID_NAME_REGEX.match(job_type) or
      not data_types.Job.query(data_types.Job.name == job_type).get()):
    raise helpers.EarlyExitException('Invalid job name.', 400)

  fuzzer_name = ''
  job_type_lowercase = job_type.lower()
  if 'libfuzzer' in job_type_lowercase:
    fuzzer_name = 'libFuzzer'
  elif 'afl' in job_type_lowercase:
    fuzzer_name = 'afl'

  target_name = self.request.get('target')
  if not fuzzer_name and target_name:
    raise helpers.EarlyExitException(
        'Target name is not applicable to non-engine jobs (AFL, libFuzzer).',
        400)

  if fuzzer_name and not target_name:
    raise helpers.EarlyExitException(
        'Missing target name for engine job (AFL, libFuzzer).', 400)

  if (target_name and
      not data_types.Fuzzer.VALID_NAME_REGEX.match(target_name)):
    raise helpers.EarlyExitException('Invalid target name.', 400)

  fully_qualified_fuzzer_name = ''
  if fuzzer_name and target_name:
    fully_qualified_fuzzer_name, target_name = find_fuzz_target(
        fuzzer_name, target_name, job_type)
    if not fully_qualified_fuzzer_name:
      raise helpers.EarlyExitException('Target does not exist.', 400)

  if not access.has_access(
      need_privileged_access=False,
      job_type=job_type,
      fuzzer_name=(fully_qualified_fuzzer_name or fuzzer_name)):
    raise helpers.AccessDeniedException()

  multiple_testcases = bool(self.request.get('multiple'))
  http_flag = bool(self.request.get('http'))
  high_end_job = bool(self.request.get('highEnd'))
  bug_information = self.request.get('issue')
  crash_revision = self.request.get('revision')
  timeout = self.request.get('timeout')
  retries = self.request.get('retries')
  bug_summary_update_flag = bool(self.request.get('updateIssue'))
  additional_arguments = self.request.get('args')
  app_launch_command = self.request.get('cmd')
  platform_id = self.request.get('platform')

  testcase_metadata = self.request.get('metadata')
  if testcase_metadata:
    try:
      testcase_metadata = json.loads(testcase_metadata)
      if not isinstance(testcase_metadata, dict):
        raise helpers.EarlyExitException('Metadata is not a JSON object.', 400)
    except Exception:
      raise helpers.EarlyExitException('Invalid metadata JSON.', 400)

  archive_state = 0
  bundled = False
  file_path_input = ''
  email = helpers.get_user_email()

  # If we have an AFL or libFuzzer target, use that for arguments.
  # Launch command looks like:
  # python launcher.py {testcase_path} {target_name}
  if target_name:
    additional_arguments = '%%TESTCASE%% %s' % target_name

  # Certain modifications such as app launch command, issue updates are only
  # allowed for privileged users.
  privileged_user = access.has_access(need_privileged_access=True)
  if not privileged_user:
    if bug_information or bug_summary_update_flag:
      raise helpers.EarlyExitException(
          'You are not privileged to update existing issues.', 400)

    need_privileged_access = utils.string_is_true(
        data_handler.get_value_from_job_definition(job_type,
                                                   'PRIVILEGED_ACCESS'))
    if need_privileged_access:
      raise helpers.EarlyExitException(
          'You are not privileged to run this job type.', 400)

    if app_launch_command:
      raise helpers.EarlyExitException(
          'You are not privileged to run arbitrary launch commands.', 400)

    if testcase_metadata:
      raise helpers.EarlyExitException(
          'You are not privileged to set testcase metadata.', 400)

  if crash_revision and crash_revision.isdigit():
    crash_revision = int(crash_revision)
  else:
    crash_revision = 0

  if bug_information and not bug_information.isdigit():
    raise helpers.EarlyExitException('Bug is not a number.', 400)

  if not timeout:
    timeout = 0
  elif not timeout.isdigit() or timeout == '0':
    raise helpers.EarlyExitException(
        'Testcase timeout must be a number greater than 0.', 400)
  else:
    timeout = int(timeout)
    if timeout > 120:
      raise helpers.EarlyExitException(
          'Testcase timeout may not be greater than 120 seconds.', 400)

  if retries:
    if retries.isdigit():
      retries = int(retries)
    else:
      retries = None

    if retries is None or retries > MAX_RETRIES:
      raise helpers.EarlyExitException(
          'Testcase retries must be a number less than %d.' % MAX_RETRIES, 400)
  else:
    retries = None

  try:
    gestures = ast.literal_eval(self.request.get('gestures'))
  except Exception:
    gestures = []
  if not gestures:
    gestures = []

  job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

  if uploaded_file is not None:
    filename = ''.join(
        [x for x in uploaded_file.filename if x not in ' ;/?:@&=+$,{}|<>()\\'])
    key = str(uploaded_file.key())
    if archive.is_archive(filename):
      archive_state = data_types.ArchiveStatus.FUZZED

    if archive_state:
      if multiple_testcases:
        if testcase_metadata:
          raise helpers.EarlyExitException(
              'Testcase metadata not supported with multiple testcases.', 400)
        # Create a job to unpack an archive.
        metadata = data_types.BundledArchiveMetadata()
        metadata.blobstore_key = key
        metadata.timeout = timeout
        metadata.job_queue = job_queue
        metadata.job_type = job_type
        metadata.http_flag = http_flag
        metadata.archive_filename = filename
        metadata.uploader_email = email
        metadata.gestures = gestures
        metadata.crash_revision = crash_revision
        metadata.additional_arguments = additional_arguments
        metadata.bug_information = bug_information
        metadata.platform_id = platform_id
        metadata.app_launch_command = app_launch_command
        metadata.fuzzer_name = fuzzer_name
        metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
        metadata.fuzzer_binary_name = target_name
        metadata.put()

        tasks.add_task(
            'unpack',
            str(metadata.key.id()),
            job_type,
            queue=tasks.queue_for_job(job_type))

        # Create a testcase metadata object to show the user their upload.
        upload_metadata = data_types.TestcaseUploadMetadata()
        upload_metadata.timestamp = datetime.datetime.utcnow()
        upload_metadata.filename = filename
        upload_metadata.blobstore_key = key
        upload_metadata.original_blobstore_key = key
        upload_metadata.status = 'Pending'
        upload_metadata.bundled = True
        upload_metadata.uploader_email = email
        upload_metadata.retries = retries
        upload_metadata.bug_summary_update_flag = bug_summary_update_flag
        upload_metadata.put()

        helpers.log('Uploaded multiple testcases.', helpers.VIEW_OPERATION)
        self.render_json({'multiple': True})
        return

      file_path_input = guess_input_file(uploaded_file, filename)
      if not file_path_input:
        raise helpers.EarlyExitException(
            ("Unable to detect which file to launch. The main file's name "
             'must contain either of %s.' % str(RUN_FILE_PATTERNS)), 400)
  else:
    raise helpers.EarlyExitException('Please select a file to upload.', 400)

  testcase_id = data_handler.create_user_uploaded_testcase(
      key, key, archive_state, filename, file_path_input, timeout, job_type,
      job_queue, http_flag, gestures, additional_arguments, bug_information,
      crash_revision, email, platform_id, app_launch_command, fuzzer_name,
      fully_qualified_fuzzer_name, target_name, bundled, retries,
      bug_summary_update_flag, additional_metadata=testcase_metadata)

  testcase = data_handler.get_testcase_by_id(testcase_id)
  issue = issue_tracker_utils.get_issue_for_testcase(testcase)
  if issue:
    report_url = data_handler.TESTCASE_REPORT_URL.format(
        domain=data_handler.get_domain(), testcase_id=testcase_id)
    comment = ('ClusterFuzz is analyzing your testcase. '
               'Developers can follow the progress at %s.' % report_url)
    issue.save(new_comment=comment)

  helpers.log('Uploaded testcase %s' % testcase_id, helpers.VIEW_OPERATION)
  self.render_json({'id': '%s' % testcase_id})

def post(self):
  """Handle a post request."""
  config = db_config.get()
  if not config:
    config = data_types.Config()

  previous_hash = self.request.get('previous_hash')
  if config.previous_hash and config.previous_hash != previous_hash:
    raise helpers.EarlyExitException(
        'Your change conflicts with another configuration update. '
        'Please refresh and try again.', 500)

  build_apiary_service_account_private_key = self.request.get(
      'build_apiary_service_account_private_key')
  bug_report_url = self.request.get('bug_report_url')
  client_credentials = self.request.get('client_credentials')
  jira_url = self.request.get('jira_url')
  jira_credentials = self.request.get('jira_credentials')
  component_repository_mappings = self.request.get(
      'component_repository_mappings')
  contact_string = self.request.get('contact_string')
  documentation_url = self.request.get('documentation_url')
  github_credentials = self.request.get('github_credentials')
  platform_group_mappings = self.request.get('platform_group_mappings')
  privileged_users = self.request.get('privileged_users')
  blacklisted_users = self.request.get('blacklisted_users')
  relax_security_bug_restrictions = self.request.get(
      'relax_security_bug_restrictions')
  relax_testcase_restrictions = self.request.get(
      'relax_testcase_restrictions')
  reproduce_tool_client_id = self.request.get('reproduce_tool_client_id')
  reproduce_tool_client_secret = self.request.get(
      'reproduce_tool_client_secret')
  reproduction_help_url = self.request.get('reproduction_help_url')
  test_account_email = self.request.get('test_account_email')
  test_account_password = self.request.get('test_account_password')
  wifi_ssid = self.request.get('wifi_ssid')
  wifi_password = self.request.get('wifi_password')
  sendgrid_api_key = self.request.get('sendgrid_api_key')
  sendgrid_sender = self.request.get('sendgrid_sender')

  config.build_apiary_service_account_private_key = (
      build_apiary_service_account_private_key)
  config.bug_report_url = bug_report_url
  config.client_credentials = client_credentials
  config.component_repository_mappings = component_repository_mappings
  config.contact_string = contact_string
  config.documentation_url = documentation_url
  config.github_credentials = github_credentials
  config.jira_credentials = jira_credentials
  config.jira_url = jira_url
  config.platform_group_mappings = platform_group_mappings
  config.privileged_users = privileged_users
  config.blacklisted_users = blacklisted_users
  config.relax_security_bug_restrictions = bool(
      relax_security_bug_restrictions)
  config.relax_testcase_restrictions = bool(relax_testcase_restrictions)
  config.reproduce_tool_client_id = reproduce_tool_client_id
  config.reproduce_tool_client_secret = reproduce_tool_client_secret
  config.reproduction_help_url = reproduction_help_url
  config.test_account_email = test_account_email
  config.test_account_password = test_account_password
  config.wifi_ssid = wifi_ssid
  config.wifi_password = wifi_password
  config.sendgrid_api_key = sendgrid_api_key
  config.sendgrid_sender = sendgrid_sender

  helpers.log('Configuration', helpers.MODIFY_OPERATION)

  # Before hashing the entity, we must put it so that the internal maps are
  # updated.
  config.put()
  config.previous_hash = utils.entity_hash(config)
  config.put()

  template_values = {
      'title': 'Success',
      'message': ('Configuration is successfully updated. '
                  'Redirecting to the configuration page...'),
      'redirect_url': '/configuration',
  }
  self.render('message.html', template_values)

def apply_fuzzer_changes(self, fuzzer, upload_info):
  """Apply changes to a fuzzer."""
  if upload_info and not archive.is_archive(upload_info.filename):
    raise helpers.EarlyExitException(
        'Sorry, only zip, tgz, tar.gz, tbz, and tar.bz2 archives are '
        'allowed!', 400)

  if fuzzer.builtin:
    executable_path = launcher_script = None
  else:
    executable_path = self._get_executable_path(upload_info)
    launcher_script = self._get_launcher_script(upload_info)

    # Executable path is required for non-builtin fuzzers and if it is not
    # already set.
    if not fuzzer.executable_path and not executable_path:
      raise helpers.EarlyExitException(
          'Please enter the path to the executable, or if the archive you '
          'uploaded is less than 16MB, ensure that the executable file has '
          '"run" in its name.', 400)

  jobs = self.request.get('jobs', [])
  timeout = self._get_integer_value('timeout')
  max_testcases = self._get_integer_value('max_testcases')
  external_contribution = self.request.get('external_contribution', False)
  differential = self.request.get('differential', False)
  environment_string = self.request.get('additional_environment_string')
  data_bundle_name = self.request.get('data_bundle_name')

  # Save the fuzzer file metadata.
  if upload_info:
    fuzzer.filename = upload_info.filename
    fuzzer.blobstore_key = str(upload_info.key())
    fuzzer.file_size = utils.get_size_string(upload_info.size)

  fuzzer.jobs = jobs
  fuzzer.revision = fuzzer.revision + 1
  fuzzer.source = helpers.get_user_email()
  fuzzer.timeout = timeout
  fuzzer.max_testcases = max_testcases
  fuzzer.result = None
  fuzzer.sample_testcase = None
  fuzzer.console_output = None
  fuzzer.external_contribution = bool(external_contribution)
  fuzzer.differential = bool(differential)
  fuzzer.additional_environment_string = environment_string
  fuzzer.timestamp = datetime.datetime.utcnow()
  fuzzer.data_bundle_name = data_bundle_name

  # Update only if a new archive is provided.
  if executable_path:
    fuzzer.executable_path = executable_path

  # Optional. Also, update only if a new archive is provided and contains a
  # launcher script.
  if launcher_script:
    fuzzer.launcher_script = launcher_script

  fuzzer.put()

  fuzzer_selection.update_mappings_for_fuzzer(fuzzer)

  helpers.log('Uploaded fuzzer %s.' % fuzzer.name, helpers.MODIFY_OPERATION)
  self.redirect('/fuzzers')

def check_redirect_url(url):
  """Check that a redirect URL is safe."""
  if not _SAFE_URL_PATTERN.match(url):
    raise helpers.EarlyExitException('Invalid redirect.', 403)

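# Illustrative sketch (assumption: _SAFE_URL_PATTERN is defined elsewhere in
# the module; the names and the regex below are only an example of what such
# an allowlist check might look like, not the project's actual pattern). It
# accepts same-origin absolute paths such as '/testcases?foo=bar' and rejects
# scheme-relative or absolute URLs such as '//evil.example' and
# 'https://evil.example'.
#
# import re
#
# _EXAMPLE_SAFE_URL_PATTERN = re.compile(
#     r'^/[A-Za-z0-9._~!$&\'()*+,;=:@/?-]*$')
#
# def _example_check_redirect_url(url):
#   # Scheme-relative URLs start with '//' and would otherwise slip through
#   # a pattern that simply requires a leading '/'.
#   if url.startswith('//') or not _EXAMPLE_SAFE_URL_PATTERN.match(url):
#     raise ValueError('Invalid redirect.')
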
def post(self):
  """Handle a post request."""
  name = self.request.get('name')
  if not name:
    raise helpers.EarlyExitException('Please give this job a name!', 400)

  if not data_types.Job.VALID_NAME_REGEX.match(name):
    raise helpers.EarlyExitException(
        'Job name can only contain letters, numbers, dashes and underscores.',
        400)

  fuzzers = self.request.get('fuzzers', []).split(',')
  templates = self.request.get('templates', '').splitlines()
  for template in templates:
    if not data_types.JobTemplate.query(
        data_types.JobTemplate.name == template).get():
      raise helpers.EarlyExitException('Invalid template name(s) specified.',
                                       400)

  new_platform = self.request.get('platform')
  if not new_platform or new_platform == 'undefined':
    raise helpers.EarlyExitException('No platform provided for job.', 400)

  description = self.request.get('description', '')
  environment_string = self.request.get('environment_string', '')
  previous_custom_binary_revision = 0

  job = data_types.Job.query(data_types.Job.name == name).get()
  recreate_fuzzer_mappings = False
  if not job:
    job = data_types.Job()
  else:
    previous_custom_binary_revision = job.custom_binary_revision
    if previous_custom_binary_revision is None:
      previous_custom_binary_revision = 0

    if new_platform != job.platform:
      # The rare case of modifying a job's platform causes many problems with
      # task selection. If a job is leased from the old queue, the task will
      # be recreated in the correct queue at lease time. Fuzzer mappings must
      # be purged and recreated, since they depend on the job's platform.
      recreate_fuzzer_mappings = True

  job.name = name
  job.platform = new_platform
  job.description = description
  job.environment_string = environment_string
  job.templates = templates

  blob_info = self.get_upload()
  if blob_info:
    job.custom_binary_key = str(blob_info.key())
    job.custom_binary_filename = blob_info.filename
    job.custom_binary_revision = previous_custom_binary_revision + 1

  if job.custom_binary_key and 'CUSTOM_BINARY' not in job.environment_string:
    job.environment_string += '\nCUSTOM_BINARY = True'

  job.put()

  fuzzer_selection.update_mappings_for_job(job, fuzzers)
  if recreate_fuzzer_mappings:
    fuzzer_selection.update_platform_for_job(name, new_platform)

  # pylint: disable=unexpected-keyword-arg
  _ = data_handler.get_all_job_type_names(__memoize_force__=True)

  helpers.log('Job created %s' % name, helpers.MODIFY_OPERATION)
  template_values = {
      'title': 'Success',
      'message': ('Job %s is successfully updated. '
                  'Redirecting back to jobs page...') % name,
      'redirect_url': '/jobs',
  }
  self.render('message.html', template_values)

def get(self):
  self.is_json = True
  raise helpers.EarlyExitException('message', 500, [])

def post(self):
  """Handle a post request."""
  email = utils.normalize_email(self.request.get('email'))
  entity_kind = self.request.get('entity_kind')
  entity_name = self.request.get('entity_name')
  is_prefix = self.request.get('is_prefix')
  auto_cc = self.request.get('auto_cc')

  if not email:
    raise helpers.EarlyExitException('No email provided.', 400)

  if not entity_kind or entity_kind == 'undefined':
    raise helpers.EarlyExitException('No entity_kind provided.', 400)

  entity_kind = get_value_by_name(USER_PERMISSION_ENTITY_KINDS, entity_kind)
  if entity_kind is None:
    raise helpers.EarlyExitException('Invalid entity_kind provided.', 400)

  if entity_kind == data_types.PermissionEntityKind.UPLOADER:
    # Enforce null values for entity name and auto-cc when uploader is chosen.
    entity_name = None
    auto_cc = data_types.AutoCCType.NONE
  else:
    if not entity_name:
      raise helpers.EarlyExitException('No entity_name provided.', 400)

    if not auto_cc or auto_cc == 'undefined':
      raise helpers.EarlyExitException('No auto_cc provided.', 400)

    auto_cc = get_value_by_name(USER_PERMISSION_AUTO_CC_TYPES, auto_cc)
    if auto_cc is None:
      raise helpers.EarlyExitException('Invalid auto_cc provided.', 400)

  # Check for existing permission.
  query = data_types.ExternalUserPermission.query(
      data_types.ExternalUserPermission.email == email,
      data_types.ExternalUserPermission.entity_kind == entity_kind,
      data_types.ExternalUserPermission.entity_name == entity_name)
  permission = query.get()
  if not permission:
    # Doesn't exist, create new one.
    permission = data_types.ExternalUserPermission(
        email=email, entity_kind=entity_kind, entity_name=entity_name)

  permission.is_prefix = bool(is_prefix)
  permission.auto_cc = auto_cc
  permission.put()

  helpers.log('Configuration', helpers.MODIFY_OPERATION)
  template_values = {
      'title': 'Success',
      'message': ('User %s permission for entity %s is successfully added. '
                  'Redirecting to the configuration page...') %
                 (email, entity_name),
      'redirect_url': '/configuration',
  }
  self.render('message.html', template_values)

def do_post(self):
  """Upload a testcase."""
  email = helpers.get_user_email()
  testcase_id = request.get('testcaseId')
  uploaded_file = self.get_upload()
  if testcase_id and not uploaded_file:
    testcase = helpers.get_testcase(testcase_id)
    if not access.can_user_access_testcase(testcase):
      raise helpers.AccessDeniedException()

    # Use minimized testcase for upload (if available).
    key = (testcase.minimized_keys if testcase.minimized_keys and
           testcase.minimized_keys != 'NA' else testcase.fuzzed_keys)

    uploaded_file = blobs.get_blob_info(key)

    # Extract filename part from blob.
    uploaded_file.filename = os.path.basename(
        uploaded_file.filename.replace('\\', os.sep))

  job_type = request.get('job')
  if not job_type:
    raise helpers.EarlyExitException('Missing job name.', 400)

  job = data_types.Job.query(data_types.Job.name == job_type).get()
  if not job:
    raise helpers.EarlyExitException('Invalid job name.', 400)

  fuzzer_name = request.get('fuzzer')
  job_type_lowercase = job_type.lower()

  if 'libfuzzer' in job_type_lowercase:
    fuzzer_name = 'libFuzzer'
  elif 'afl' in job_type_lowercase:
    fuzzer_name = 'afl'
  elif 'honggfuzz' in job_type_lowercase:
    fuzzer_name = 'honggfuzz'

  is_engine_job = fuzzer_name and environment.is_engine_fuzzer_job(job_type)
  target_name = request.get('target')
  if not is_engine_job and target_name:
    raise helpers.EarlyExitException(
        'Target name is not applicable to non-engine jobs (AFL, libFuzzer).',
        400)

  if is_engine_job and not target_name:
    raise helpers.EarlyExitException(
        'Missing target name for engine job (AFL, libFuzzer).', 400)

  if (target_name and
      not data_types.Fuzzer.VALID_NAME_REGEX.match(target_name)):
    raise helpers.EarlyExitException('Invalid target name.', 400)

  fully_qualified_fuzzer_name = ''
  if is_engine_job and target_name:
    if job.is_external():
      # External jobs don't run and set FuzzTarget entities as part of
      # fuzz_task. Set it here instead.
      fuzz_target = data_handler.record_fuzz_target(fuzzer_name, target_name,
                                                    job_type)
      fully_qualified_fuzzer_name = fuzz_target.fully_qualified_name()
      target_name = fuzz_target.binary
    else:
      fully_qualified_fuzzer_name, target_name = find_fuzz_target(
          fuzzer_name, target_name, job_type)

  if (not access.has_access(
      need_privileged_access=False,
      job_type=job_type,
      fuzzer_name=(fully_qualified_fuzzer_name or fuzzer_name)) and
      not _is_uploader_allowed(email)):
    raise helpers.AccessDeniedException()

  multiple_testcases = bool(request.get('multiple'))
  http_flag = bool(request.get('http'))
  high_end_job = bool(request.get('highEnd'))
  bug_information = request.get('issue')
  crash_revision = request.get('revision')
  timeout = request.get('timeout')
  retries = request.get('retries')
  bug_summary_update_flag = bool(request.get('updateIssue'))
  quiet_flag = bool(request.get('quiet'))
  additional_arguments = request.get('args')
  app_launch_command = request.get('cmd')
  platform_id = request.get('platform')
  issue_labels = request.get('issue_labels')
  gestures = request.get('gestures') or '[]'
  stacktrace = request.get('stacktrace')

  crash_data = None
  if job.is_external():
    if not stacktrace:
      raise helpers.EarlyExitException(
          'Stacktrace required for external jobs.', 400)

    if not crash_revision:
      raise helpers.EarlyExitException(
          'Revision required for external jobs.', 400)

    crash_data = stack_analyzer.get_crash_data(
        stacktrace,
        fuzz_target=target_name,
        symbolize_flag=False,
        already_symbolized=True,
        detect_ooms_and_hangs=True)
  elif stacktrace:
    raise helpers.EarlyExitException(
        'Should not specify stacktrace for non-external jobs.', 400)

  testcase_metadata = request.get('metadata', {})
  if testcase_metadata:
    try:
      testcase_metadata = json.loads(testcase_metadata)
    except Exception as e:
      raise helpers.EarlyExitException('Invalid metadata JSON.', 400) from e
    if not isinstance(testcase_metadata, dict):
      raise helpers.EarlyExitException('Metadata is not a JSON object.', 400)
  if issue_labels:
    testcase_metadata['issue_labels'] = issue_labels

  try:
    gestures = ast.literal_eval(gestures)
  except Exception as e:
    raise helpers.EarlyExitException('Failed to parse gestures.', 400) from e

  archive_state = 0
  bundled = False
  file_path_input = ''

  # Certain modifications such as app launch command, issue updates are only
  # allowed for privileged users.
  privileged_user = access.has_access(need_privileged_access=True)
  if not privileged_user:
    if bug_information or bug_summary_update_flag:
      raise helpers.EarlyExitException(
          'You are not privileged to update existing issues.', 400)

    need_privileged_access = utils.string_is_true(
        data_handler.get_value_from_job_definition(job_type,
                                                   'PRIVILEGED_ACCESS'))
    if need_privileged_access:
      raise helpers.EarlyExitException(
          'You are not privileged to run this job type.', 400)

    if app_launch_command:
      raise helpers.EarlyExitException(
          'You are not privileged to run arbitrary launch commands.', 400)

    if (testcase_metadata and
        not _allow_unprivileged_metadata(testcase_metadata)):
      raise helpers.EarlyExitException(
          'You are not privileged to set testcase metadata.', 400)

    if additional_arguments:
      raise helpers.EarlyExitException(
          'You are not privileged to add command-line arguments.', 400)

    if gestures:
      raise helpers.EarlyExitException(
          'You are not privileged to run arbitrary gestures.', 400)

  # TODO(aarya): Remove once AFL is migrated to engine pipeline.
  if target_name:
    additional_arguments = '%TESTCASE%'

  if crash_revision and crash_revision.isdigit():
    crash_revision = int(crash_revision)
  else:
    crash_revision = 0

  if bug_information == '0':  # Auto-recover from this bad input.
    bug_information = None
  if bug_information and not bug_information.isdigit():
    raise helpers.EarlyExitException('Bug is not a number.', 400)

  if not timeout:
    timeout = 0
  elif not timeout.isdigit() or timeout == '0':
    raise helpers.EarlyExitException(
        'Testcase timeout must be a number greater than 0.', 400)
  else:
    timeout = int(timeout)
    if timeout > 120:
      raise helpers.EarlyExitException(
          'Testcase timeout may not be greater than 120 seconds.', 400)

  if retries:
    if retries.isdigit():
      retries = int(retries)
    else:
      retries = None

    if retries is None or retries > MAX_RETRIES:
      raise helpers.EarlyExitException(
          'Testcase retries must be a number less than %d.' % MAX_RETRIES, 400)
  else:
    retries = None

  job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

  if uploaded_file is not None:
    filename = ''.join(
        [x for x in uploaded_file.filename if x not in ' ;/?:@&=+$,{}|<>()\\'])
    key = str(uploaded_file.key())
    if archive.is_archive(filename):
      archive_state = data_types.ArchiveStatus.FUZZED

    if archive_state:
      if multiple_testcases:
        # Create a job to unpack an archive.
        metadata = data_types.BundledArchiveMetadata()
        metadata.blobstore_key = key
        metadata.timeout = timeout
        metadata.job_queue = job_queue
        metadata.job_type = job_type
        metadata.http_flag = http_flag
        metadata.archive_filename = filename
        metadata.uploader_email = email
        metadata.gestures = gestures
        metadata.crash_revision = crash_revision
        metadata.additional_arguments = additional_arguments
        metadata.bug_information = bug_information
        metadata.platform_id = platform_id
        metadata.app_launch_command = app_launch_command
        metadata.fuzzer_name = fuzzer_name
        metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
        metadata.fuzzer_binary_name = target_name
        metadata.put()

        tasks.add_task(
            'unpack',
            str(metadata.key.id()),
            job_type,
            queue=tasks.queue_for_job(job_type))

        # Create a testcase metadata object to show the user their upload.
        upload_metadata = data_types.TestcaseUploadMetadata()
        upload_metadata.timestamp = datetime.datetime.utcnow()
        upload_metadata.filename = filename
        upload_metadata.blobstore_key = key
        upload_metadata.original_blobstore_key = key
        upload_metadata.status = 'Pending'
        upload_metadata.bundled = True
        upload_metadata.uploader_email = email
        upload_metadata.retries = retries
        upload_metadata.bug_summary_update_flag = bug_summary_update_flag
        upload_metadata.quiet_flag = quiet_flag
        upload_metadata.additional_metadata_string = json.dumps(
            testcase_metadata)
        upload_metadata.bug_information = bug_information
        upload_metadata.put()

        helpers.log('Uploaded multiple testcases.', helpers.VIEW_OPERATION)
        return self.render_json({'multiple': True})

      file_path_input = guess_input_file(uploaded_file, filename)
      if not file_path_input:
        raise helpers.EarlyExitException(
            ("Unable to detect which file to launch. The main file's name "
             'must contain either of %s.' % str(RUN_FILE_PATTERNS)), 400)
  else:
    raise helpers.EarlyExitException('Please select a file to upload.', 400)

  testcase_id = data_handler.create_user_uploaded_testcase(
      key,
      key,
      archive_state,
      filename,
      file_path_input,
      timeout,
      job,
      job_queue,
      http_flag,
      gestures,
      additional_arguments,
      bug_information,
      crash_revision,
      email,
      platform_id,
      app_launch_command,
      fuzzer_name,
      fully_qualified_fuzzer_name,
      target_name,
      bundled,
      retries,
      bug_summary_update_flag,
      quiet_flag,
      additional_metadata=testcase_metadata,
      crash_data=crash_data)

  if not quiet_flag:
    testcase = data_handler.get_testcase_by_id(testcase_id)
    issue = issue_tracker_utils.get_issue_for_testcase(testcase)
    if issue:
      report_url = data_handler.TESTCASE_REPORT_URL.format(
          domain=data_handler.get_domain(), testcase_id=testcase_id)

      comment = ('ClusterFuzz is analyzing your testcase. '
                 'Developers can follow the progress at %s.' % report_url)
      issue.save(new_comment=comment)

  helpers.log('Uploaded testcase %s' % testcase_id, helpers.VIEW_OPERATION)
  return self.render_json({'id': '%s' % testcase_id})

def get(self, report_type=None, argument=None, date=None, extra=None):
  """Handle a get request."""
  report_url = get_report_url(report_type, argument, date)
  if report_url:
    return self.redirect(report_url)

  raise helpers.EarlyExitException('Failed to get coverage report.', 400)

def post(self):
  """Handle a post request."""
  config = db_config.get()
  if not config:
    config = data_types.Config()

  previous_hash = self.request.get("previous_hash")
  if config.previous_hash and config.previous_hash != previous_hash:
    raise helpers.EarlyExitException(
        "Your change conflicts with another configuration update. "
        "Please refresh and try again.",
        500,
    )

  build_apiary_service_account_email = self.request.get(
      "build_apiary_service_account_email")
  build_apiary_service_account_private_key = self.request.get(
      "build_apiary_service_account_private_key")
  bug_report_url = self.request.get("bug_report_url")
  client_credentials = self.request.get("client_credentials")
  component_repository_mappings = self.request.get(
      "component_repository_mappings")
  contact_string = self.request.get("contact_string")
  documentation_url = self.request.get("documentation_url")
  github_credentials = self.request.get("github_credentials")
  platform_group_mappings = self.request.get("platform_group_mappings")
  privileged_users = self.request.get("privileged_users")
  relax_security_bug_restrictions = self.request.get(
      "relax_security_bug_restrictions")
  relax_testcase_restrictions = self.request.get(
      "relax_testcase_restrictions")
  reproduce_tool_client_id = self.request.get("reproduce_tool_client_id")
  reproduce_tool_client_secret = self.request.get(
      "reproduce_tool_client_secret")
  reproduction_help_url = self.request.get("reproduction_help_url")
  test_account_email = self.request.get("test_account_email")
  test_account_password = self.request.get("test_account_password")
  wifi_ssid = self.request.get("wifi_ssid")
  wifi_password = self.request.get("wifi_password")
  sendgrid_api_key = self.request.get("sendgrid_api_key")
  sendgrid_sender = self.request.get("sendgrid_sender")

  config.build_apiary_service_account_email = (
      build_apiary_service_account_email)
  config.build_apiary_service_account_private_key = (
      build_apiary_service_account_private_key)
  config.bug_report_url = bug_report_url
  config.client_credentials = client_credentials
  config.component_repository_mappings = component_repository_mappings
  config.contact_string = contact_string
  config.documentation_url = documentation_url
  config.github_credentials = github_credentials
  config.platform_group_mappings = platform_group_mappings
  config.privileged_users = privileged_users
  config.relax_security_bug_restrictions = bool(
      relax_security_bug_restrictions)
  config.relax_testcase_restrictions = bool(relax_testcase_restrictions)
  config.reproduce_tool_client_id = reproduce_tool_client_id
  config.reproduce_tool_client_secret = reproduce_tool_client_secret
  config.reproduction_help_url = reproduction_help_url
  config.test_account_email = test_account_email
  config.test_account_password = test_account_password
  config.wifi_ssid = wifi_ssid
  config.wifi_password = wifi_password
  config.sendgrid_api_key = sendgrid_api_key
  config.sendgrid_sender = sendgrid_sender

  helpers.log("Configuration", helpers.MODIFY_OPERATION)

  # Before hashing the entity, we must put it so that the internal maps are
  # updated.
  config.put()
  config.previous_hash = utils.entity_hash(config)
  config.put()

  template_values = {
      "title": "Success",
      "message": ("Configuration is successfully updated. "
                  "Redirecting to the configuration page..."),
      "redirect_url": "/configuration",
  }
  self.render("message.html", template_values)

def post(self):
  """Handle a post request."""
  email = utils.normalize_email(self.request.get("email"))
  entity_kind = self.request.get("entity_kind")
  entity_name = self.request.get("entity_name")
  is_prefix = self.request.get("is_prefix")
  auto_cc = self.request.get("auto_cc")

  if not email:
    raise helpers.EarlyExitException("No email provided.", 400)

  if not entity_kind or entity_kind == "undefined":
    raise helpers.EarlyExitException("No entity_kind provided.", 400)

  entity_kind = get_value_by_name(USER_PERMISSION_ENTITY_KINDS, entity_kind)
  if entity_kind is None:
    raise helpers.EarlyExitException("Invalid entity_kind provided.", 400)

  if entity_kind == data_types.PermissionEntityKind.UPLOADER:
    # Enforce null values for entity name and auto-cc when uploader is chosen.
    entity_name = None
    auto_cc = data_types.AutoCCType.NONE
  else:
    if not entity_name:
      raise helpers.EarlyExitException("No entity_name provided.", 400)

    if not auto_cc or auto_cc == "undefined":
      raise helpers.EarlyExitException("No auto_cc provided.", 400)

    auto_cc = get_value_by_name(USER_PERMISSION_AUTO_CC_TYPES, auto_cc)
    if auto_cc is None:
      raise helpers.EarlyExitException("Invalid auto_cc provided.", 400)

  # Check for existing permission.
  query = data_types.ExternalUserPermission.query(
      data_types.ExternalUserPermission.email == email,
      data_types.ExternalUserPermission.entity_kind == entity_kind,
      data_types.ExternalUserPermission.entity_name == entity_name,
  )
  permission = query.get()
  if not permission:
    # Doesn't exist, create new one.
    permission = data_types.ExternalUserPermission(
        email=email, entity_kind=entity_kind, entity_name=entity_name)

  permission.is_prefix = bool(is_prefix)
  permission.auto_cc = auto_cc
  permission.put()

  helpers.log("Configuration", helpers.MODIFY_OPERATION)
  template_values = {
      "title": "Success",
      "message": ("User %s permission for entity %s is successfully added. "
                  "Redirecting to the configuration page...") %
                 (email, entity_name),
      "redirect_url": "/configuration",
  }
  self.render("message.html", template_values)

def do_post(self):
  """Upload a testcase."""
  email = helpers.get_user_email()
  testcase_id = self.request.get("testcaseId")
  uploaded_file = self.get_upload()
  if testcase_id and not uploaded_file:
    testcase = helpers.get_testcase(testcase_id)
    if not access.can_user_access_testcase(testcase):
      raise helpers.AccessDeniedException()

    # Use minimized testcase for upload (if available).
    key = (testcase.minimized_keys if testcase.minimized_keys and
           testcase.minimized_keys != "NA" else testcase.fuzzed_keys)

    uploaded_file = blobs.get_blob_info(key)

    # Extract filename part from blob.
    uploaded_file.filename = os.path.basename(
        uploaded_file.filename.replace("\\", os.sep))

  job_type = self.request.get("job")
  if not job_type:
    raise helpers.EarlyExitException("Missing job name.", 400)

  if (not data_types.Job.VALID_NAME_REGEX.match(job_type) or
      not data_types.Job.query(data_types.Job.name == job_type).get()):
    raise helpers.EarlyExitException("Invalid job name.", 400)

  fuzzer_name = ""
  job_type_lowercase = job_type.lower()
  if "libfuzzer" in job_type_lowercase:
    fuzzer_name = "libFuzzer"
  elif "afl" in job_type_lowercase:
    fuzzer_name = "afl"

  target_name = self.request.get("target")
  if not fuzzer_name and target_name:
    raise helpers.EarlyExitException(
        "Target name is not applicable to non-engine jobs (AFL, libFuzzer).",
        400,
    )

  if fuzzer_name and not target_name:
    raise helpers.EarlyExitException(
        "Missing target name for engine job (AFL, libFuzzer).", 400)

  if target_name and not data_types.Fuzzer.VALID_NAME_REGEX.match(target_name):
    raise helpers.EarlyExitException("Invalid target name.", 400)

  fully_qualified_fuzzer_name = ""
  if fuzzer_name and target_name:
    fully_qualified_fuzzer_name, target_name = find_fuzz_target(
        fuzzer_name, target_name, job_type)
    if not fully_qualified_fuzzer_name:
      raise helpers.EarlyExitException("Target does not exist.", 400)

  if not access.has_access(
      need_privileged_access=False,
      job_type=job_type,
      fuzzer_name=(fully_qualified_fuzzer_name or fuzzer_name),
  ) and not _is_uploader_allowed(email):
    raise helpers.AccessDeniedException()

  multiple_testcases = bool(self.request.get("multiple"))
  http_flag = bool(self.request.get("http"))
  high_end_job = bool(self.request.get("highEnd"))
  bug_information = self.request.get("issue")
  crash_revision = self.request.get("revision")
  timeout = self.request.get("timeout")
  retries = self.request.get("retries")
  bug_summary_update_flag = bool(self.request.get("updateIssue"))
  quiet_flag = bool(self.request.get("quiet"))
  additional_arguments = self.request.get("args")
  app_launch_command = self.request.get("cmd")
  platform_id = self.request.get("platform")
  issue_labels = self.request.get("issue_labels")
  gestures = self.request.get("gestures") or "[]"

  testcase_metadata = self.request.get("metadata", {})
  if testcase_metadata:
    try:
      testcase_metadata = json.loads(testcase_metadata)
    except Exception:
      raise helpers.EarlyExitException("Invalid metadata JSON.", 400)
    if not isinstance(testcase_metadata, dict):
      raise helpers.EarlyExitException("Metadata is not a JSON object.", 400)
  if issue_labels:
    testcase_metadata["issue_labels"] = issue_labels

  try:
    gestures = ast.literal_eval(gestures)
  except Exception:
    raise helpers.EarlyExitException("Failed to parse gestures.", 400)

  archive_state = 0
  bundled = False
  file_path_input = ""

  # Certain modifications such as app launch command, issue updates are only
  # allowed for privileged users.
  privileged_user = access.has_access(need_privileged_access=True)
  if not privileged_user:
    if bug_information or bug_summary_update_flag:
      raise helpers.EarlyExitException(
          "You are not privileged to update existing issues.", 400)

    need_privileged_access = utils.string_is_true(
        data_handler.get_value_from_job_definition(job_type,
                                                   "PRIVILEGED_ACCESS"))
    if need_privileged_access:
      raise helpers.EarlyExitException(
          "You are not privileged to run this job type.", 400)

    if app_launch_command:
      raise helpers.EarlyExitException(
          "You are not privileged to run arbitrary launch commands.", 400)

    if (testcase_metadata and
        not _allow_unprivileged_metadata(testcase_metadata)):
      raise helpers.EarlyExitException(
          "You are not privileged to set testcase metadata.", 400)

    if additional_arguments:
      raise helpers.EarlyExitException(
          "You are not privileged to add command-line arguments.", 400)

    if gestures:
      raise helpers.EarlyExitException(
          "You are not privileged to run arbitrary gestures.", 400)

  # TODO(aarya): Remove once AFL is migrated to engine pipeline.
  if target_name:
    additional_arguments = "%TESTCASE%"

  if crash_revision and crash_revision.isdigit():
    crash_revision = int(crash_revision)
  else:
    crash_revision = 0

  if bug_information == "0":  # Auto-recover from this bad input.
    bug_information = None
  if bug_information and not bug_information.isdigit():
    raise helpers.EarlyExitException("Bug is not a number.", 400)

  if not timeout:
    timeout = 0
  elif not timeout.isdigit() or timeout == "0":
    raise helpers.EarlyExitException(
        "Testcase timeout must be a number greater than 0.", 400)
  else:
    timeout = int(timeout)
    if timeout > 120:
      raise helpers.EarlyExitException(
          "Testcase timeout may not be greater than 120 seconds.", 400)

  if retries:
    if retries.isdigit():
      retries = int(retries)
    else:
      retries = None

    if retries is None or retries > MAX_RETRIES:
      raise helpers.EarlyExitException(
          "Testcase retries must be a number less than %d." % MAX_RETRIES, 400)
  else:
    retries = None

  job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

  if uploaded_file is not None:
    filename = "".join(
        x for x in uploaded_file.filename if x not in " ;/?:@&=+$,{}|<>()\\")
    key = str(uploaded_file.key())
    if archive.is_archive(filename):
      archive_state = data_types.ArchiveStatus.FUZZED

    if archive_state:
      if multiple_testcases:
        # Create a job to unpack an archive.
        metadata = data_types.BundledArchiveMetadata()
        metadata.blobstore_key = key
        metadata.timeout = timeout
        metadata.job_queue = job_queue
        metadata.job_type = job_type
        metadata.http_flag = http_flag
        metadata.archive_filename = filename
        metadata.uploader_email = email
        metadata.gestures = gestures
        metadata.crash_revision = crash_revision
        metadata.additional_arguments = additional_arguments
        metadata.bug_information = bug_information
        metadata.platform_id = platform_id
        metadata.app_launch_command = app_launch_command
        metadata.fuzzer_name = fuzzer_name
        metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
        metadata.fuzzer_binary_name = target_name
        metadata.put()

        tasks.add_task(
            "unpack",
            str(metadata.key.id()),
            job_type,
            queue=tasks.queue_for_job(job_type),
        )

        # Create a testcase metadata object to show the user their upload.
        upload_metadata = data_types.TestcaseUploadMetadata()
        upload_metadata.timestamp = datetime.datetime.utcnow()
        upload_metadata.filename = filename
        upload_metadata.blobstore_key = key
        upload_metadata.original_blobstore_key = key
        upload_metadata.status = "Pending"
        upload_metadata.bundled = True
        upload_metadata.uploader_email = email
        upload_metadata.retries = retries
        upload_metadata.bug_summary_update_flag = bug_summary_update_flag
        upload_metadata.quiet_flag = quiet_flag
        upload_metadata.additional_metadata_string = json.dumps(
            testcase_metadata)
        upload_metadata.put()

        helpers.log("Uploaded multiple testcases.", helpers.VIEW_OPERATION)
        self.render_json({"multiple": True})
        return

      file_path_input = guess_input_file(uploaded_file, filename)
      if not file_path_input:
        raise helpers.EarlyExitException(
            ("Unable to detect which file to launch. The main file's name "
             "must contain either of %s." % str(RUN_FILE_PATTERNS)),
            400,
        )
  else:
    raise helpers.EarlyExitException("Please select a file to upload.", 400)

  testcase_id = data_handler.create_user_uploaded_testcase(
      key,
      key,
      archive_state,
      filename,
      file_path_input,
      timeout,
      job_type,
      job_queue,
      http_flag,
      gestures,
      additional_arguments,
      bug_information,
      crash_revision,
      email,
      platform_id,
      app_launch_command,
      fuzzer_name,
      fully_qualified_fuzzer_name,
      target_name,
      bundled,
      retries,
      bug_summary_update_flag,
      quiet_flag,
      additional_metadata=testcase_metadata,
  )

  if not quiet_flag:
    testcase = data_handler.get_testcase_by_id(testcase_id)
    issue = issue_tracker_utils.get_issue_for_testcase(testcase)
    if issue:
      report_url = data_handler.TESTCASE_REPORT_URL.format(
          domain=data_handler.get_domain(), testcase_id=testcase_id)

      comment = ("ClusterFuzz is analyzing your testcase. "
                 "Developers can follow the progress at %s." % report_url)
      issue.save(new_comment=comment)

  helpers.log("Uploaded testcase %s" % testcase_id, helpers.VIEW_OPERATION)
  self.render_json({"id": "%s" % testcase_id})