def remove(testcase):
  """Clear the duplicate flag on |testcase| and persist the change."""
  # Reset the testcase back to a fully processed, non-duplicate state.
  testcase.status = 'Processed'
  testcase.duplicate_of = None
  testcase.put()

  message = 'Removed duplicate status for testcase %s' % testcase.key.id()
  helpers.log(message, helpers.MODIFY_OPERATION)
def post(self):
  """Handle a post request.

  Creates or updates an ExternalUserPermission from the |email|,
  |entity_kind|, |entity_name|, |is_prefix| and |auto_cc| form parameters,
  then renders a success page redirecting back to /configuration.
  """
  # Normalize so differently-cased submissions map to one permission record.
  email = utils.normalize_email(self.request.get('email'))
  entity_kind = self.request.get('entity_kind')
  entity_name = self.request.get('entity_name')
  is_prefix = self.request.get('is_prefix')
  auto_cc = self.request.get('auto_cc')

  if not email:
    raise helpers.EarlyExitException('No email provided.', 400)
  if not entity_name:
    raise helpers.EarlyExitException('No entity_name provided.', 400)

  # 'undefined' is what the frontend sends for an unselected dropdown.
  if not entity_kind or entity_kind == 'undefined':
    raise helpers.EarlyExitException('No entity_kind provided.', 400)

  # Translate the client-supplied name to its internal enum value.
  entity_kind = get_value_by_name(USER_PERMISSION_ENTITY_KINDS, entity_kind)
  if entity_kind is None:
    raise helpers.EarlyExitException('Invalid entity_kind provided.', 400)

  if not auto_cc or auto_cc == 'undefined':
    raise helpers.EarlyExitException('No auto_cc provided.', 400)

  auto_cc = get_value_by_name(USER_PERMISSION_AUTO_CC_TYPES, auto_cc)
  if auto_cc is None:
    raise helpers.EarlyExitException('Invalid auto_cc provided.', 400)

  # Check for existing permission.
  query = data_types.ExternalUserPermission.query(
      data_types.ExternalUserPermission.email == email,
      data_types.ExternalUserPermission.entity_kind == entity_kind,
      data_types.ExternalUserPermission.entity_name == entity_name)

  permission = query.get()
  if not permission:
    # Doesn't exist, create new one.
    permission = data_types.ExternalUserPermission(
        email=email, entity_kind=entity_kind, entity_name=entity_name)

  # is_prefix/auto_cc are updated even on an existing permission.
  permission.is_prefix = bool(is_prefix)
  permission.auto_cc = auto_cc
  permission.put()

  helpers.log('Configuration', helpers.MODIFY_OPERATION)
  template_values = {
      'title': 'Success',
      'message': ('User %s permission for entity %s is successfully added. '
                  'Redirecting to the configuration page...') %
                 (email, entity_name),
      'redirect_url': '/configuration',
  }
  self.render('message.html', template_values)
def mark(testcase):
  """Close |testcase| as fixed and return it."""
  testcase.open = False
  testcase.fixed = 'Yes'
  testcase.put()

  helpers.log('Marked testcase %s as fixed' % testcase.key.id(),
              helpers.MODIFY_OPERATION)
  return testcase
def redo(testcase, testcase_tasks, user_email):
  """Redo the given tasks for a testcase.

  Args:
    testcase: Testcase entity to re-run tasks for.
    testcase_tasks: Iterable of task names to redo.
    user_email: Email of the requesting user, for attribution.

  Raises:
    helpers.EarlyExitException: If any requested task name is invalid.
  """
  try:
    tasks.redo_testcase(testcase, testcase_tasks, user_email)
  except tasks.InvalidRedoTask as error:
    # str(error) instead of error.message: BaseException has no `message`
    # attribute on Python 3 (this file uses f-strings, so it runs on 3), so
    # the old form could raise AttributeError while handling the real error.
    raise helpers.EarlyExitException(str(error), 400)

  helpers.log('Redo testcase %d: %s' % (testcase.key.id(), testcase_tasks),
              helpers.MODIFY_OPERATION)
def remove_group(testcase_id):
  """Detach a testcase from its group and return the updated testcase."""
  testcase = helpers.get_testcase(testcase_id)
  # Capture the group id before the removal below clears it.
  group_id = testcase.group_id

  data_handler.remove_testcase_from_group(testcase)

  helpers.log(
      'Removed the testcase %s from the group %s' % (testcase.key.id(),
                                                     group_id),
      helpers.MODIFY_OPERATION)
  return testcase
def get(self, fuzzer_name):
  """Render the console-output viewer for the named fuzzer."""
  helpers.log('LogHandler', fuzzer_name)

  query = data_types.Fuzzer.query(data_types.Fuzzer.name == fuzzer_name)
  fuzzer = query.get()
  if fuzzer is None:
    raise helpers.EarlyExitException('Fuzzer not found.', 400)

  return self.render('viewer.html', {
      'title': 'Output for ' + fuzzer.name,
      'content': fuzzer.console_output,
  })
def delete_testcase(testcase_id):
  """Delete a testcase.

  Args:
    testcase_id: Id of the testcase to delete.

  Raises:
    helpers.EarlyExitException: If the testcase has an associated issue.
  """
  testcase = helpers.get_testcase(testcase_id)

  # Don't delete testcases that have an associated issue.
  if testcase.bug_information:
    # Use %s rather than %d: |testcase_id| may be a string id (the success
    # log below already formats it with %s), and %d on a string would raise
    # TypeError inside this error path instead of reporting the real problem.
    raise helpers.EarlyExitException(
        'The testcase (id=%s) with an assigned issue cannot be deleted.' %
        testcase_id, 400)

  testcase.key.delete()
  helpers.log('Deleted testcase %s' % testcase_id, helpers.MODIFY_OPERATION)
def remove_issue(testcase_id):
  """Disassociate the filed issue from a testcase and return the testcase."""
  testcase = helpers.get_testcase(testcase_id)
  # Keep the old id around for the audit log message below.
  issue_id = testcase.bug_information

  testcase.bug_information = ''
  testcase.put()

  helpers.log(
      'Removed the issue %s from the testcase %s' % (issue_id,
                                                     testcase.key.id()),
      helpers.MODIFY_OPERATION)
  return testcase
def update(testcase):
  """Queue a refresh of the testcase's last-tested crash stacktrace."""
  # Flag the stacktrace as pending so the UI reflects the in-flight update.
  testcase.last_tested_crash_stacktrace = 'Pending'
  testcase.put()

  tasks.add_task(
      'variant',
      testcase.key.id(),
      testcase.job_type,
      queue=tasks.queue_for_testcase(testcase))

  helpers.log(
      'Marked testcase %s for last tested stacktrace update' %
      testcase.key.id(), helpers.MODIFY_OPERATION)
def post(self):
  """Delete the fuzzer identified by the request's integer key."""
  # NOTE(review): this handler reads the module-level `request`, unlike the
  # job-deletion handler which uses self.request — confirm which is intended
  # for this handler's framework.
  key = helpers.get_integer_key(request)
  fuzzer = ndb.Key(data_types.Fuzzer, key).get()
  if fuzzer is None:
    raise helpers.EarlyExitException('Fuzzer not found.', 400)

  # Drop all fuzzer-job mappings before removing the entity itself.
  fuzzer_selection.update_mappings_for_fuzzer(fuzzer, mappings=[])
  fuzzer.key.delete()

  helpers.log('Deleted fuzzer %s' % fuzzer.name, helpers.MODIFY_OPERATION)
  return self.redirect('/fuzzers')
def mark(testcase):
  """Flag |testcase| as a one-time (unconfirmed) crasher."""
  testcase.one_time_crasher_flag = True

  # Backfill any unset progression fields with 'NA' so the testcase is not
  # picked up by the corresponding background tasks.
  for attribute in ('fixed', 'regression', 'minimized_keys'):
    if not getattr(testcase, attribute):
      setattr(testcase, attribute, 'NA')

  testcase.put()
  helpers.log('Marked testcase %s as unconfirmed' % testcase.key.id(),
              helpers.MODIFY_OPERATION)
def mark(testcase, security, severity):
  """Set the security flag (and severity, when applicable) on |testcase|."""
  testcase.security_flag = security
  if security:
    # Fall back to an automatically analyzed severity when none was given.
    severity = severity or severity_analyzer.get_security_severity(
        testcase.crash_type, testcase.crash_stacktrace, testcase.job_type,
        bool(testcase.gestures))
    testcase.security_severity = severity

  testcase.put()
  helpers.log(
      f'Set security flags on testcase {testcase.key.id()} to {security}.',
      helpers.MODIFY_OPERATION)
def post(self):
  """Handle a post request.

  Deletes the ExternalUserPermission matching the |email|, |entity_kind|
  and |entity_name| form parameters, then renders a success page.
  """
  email = self.request.get("email")
  entity_kind = self.request.get("entity_kind")
  entity_name = self.request.get("entity_name")

  if not email:
    raise helpers.EarlyExitException("No email provided.", 400)
  # 'undefined' is what the frontend sends for an unselected dropdown.
  if not entity_kind or entity_kind == "undefined":
    raise helpers.EarlyExitException("No entity_kind provided.", 400)

  # Translate the client-supplied name to its internal enum value.
  entity_kind = get_value_by_name(USER_PERMISSION_ENTITY_KINDS, entity_kind)
  if entity_kind is None:
    raise helpers.EarlyExitException("Invalid entity_kind provided.", 400)

  if entity_kind == data_types.PermissionEntityKind.UPLOADER:
    # Uploader permissions carry no entity name.
    entity_name = None
  else:
    if not entity_name:
      raise helpers.EarlyExitException("No entity_name provided.", 400)

  # Check for existing permission.
  permission = data_types.ExternalUserPermission.query(
      data_types.ExternalUserPermission.email == email,
      data_types.ExternalUserPermission.entity_kind == entity_kind,
      data_types.ExternalUserPermission.entity_name == entity_name,
  ).get()
  if not permission:
    raise helpers.EarlyExitException("Permission does not exist.", 400)

  permission.key.delete()
  helpers.log("Configuration", helpers.MODIFY_OPERATION)
  template_values = {
      "title": "Success",
      "message": ("User %s permission for entity %s is successfully deleted. "
                  "Redirecting to the configuration page...") %
                 (email, entity_name),
      "redirect_url": "/configuration",
  }
  self.render("message.html", template_values)
def post(self):
  """Handle a post request.

  Deletes the ExternalUserPermission matching the |email|, |entity_kind|
  and |entity_name| form parameters, then renders a success page.
  """
  email = self.request.get('email')
  entity_kind = self.request.get('entity_kind')
  entity_name = self.request.get('entity_name')

  if not email:
    raise helpers.EarlyExitException('No email provided.', 400)
  # 'undefined' is what the frontend sends for an unselected dropdown.
  if not entity_kind or entity_kind == 'undefined':
    raise helpers.EarlyExitException('No entity_kind provided.', 400)

  # Translate the client-supplied name to its internal enum value.
  entity_kind = get_value_by_name(USER_PERMISSION_ENTITY_KINDS, entity_kind)
  if entity_kind is None:
    raise helpers.EarlyExitException('Invalid entity_kind provided.', 400)

  if entity_kind == data_types.PermissionEntityKind.UPLOADER:
    # Uploader permissions carry no entity name.
    entity_name = None
  else:
    if not entity_name:
      raise helpers.EarlyExitException('No entity_name provided.', 400)

  # Check for existing permission.
  permission = data_types.ExternalUserPermission.query(
      data_types.ExternalUserPermission.email == email,
      data_types.ExternalUserPermission.entity_kind == entity_kind,
      data_types.ExternalUserPermission.entity_name == entity_name).get()
  if not permission:
    raise helpers.EarlyExitException('Permission does not exist.', 400)

  permission.key.delete()
  helpers.log('Configuration', helpers.MODIFY_OPERATION)
  template_values = {
      'title': 'Success',
      'message': ('User %s permission for entity %s is successfully deleted. '
                  'Redirecting to the configuration page...') %
                 (email, entity_name),
      'redirect_url': '/configuration',
  }
  self.render('message.html', template_values)
def get_result():
  """Get the result for the crash stats page.

  Returns:
    A (result, params) tuple: |result| is the dict rendered by the page
    (pagination info plus grouped crash rows), |params| is the effective
    filter parameters including the defaults applied below.
  """
  params = dict(request.iterparams())
  page = helpers.cast(request.get('page') or 1, int, "'page' is not an int.")

  # Apply defaults and write them back into |params| so the template
  # reflects the values actually used.
  group_by = params.get('group', 'platform')
  params['group'] = group_by
  sort_by = params.get('sort', 'total_count')
  params['sort'] = sort_by
  params['number'] = params.get('number', 'count')

  # Conditions for individual records.
  query = crash_stats.Query()
  query.group_by = group_by
  query.sort_by = sort_by
  # Restrict results to what the current user is allowed to see.
  crash_access.add_scope(query, params, 'security_flag', 'job_type',
                         'fuzzer_name')
  filters.add(query, params, FILTERS)

  # Conditions after grouping.
  group_query = crash_stats.Query()
  filters.add(group_query, params, GROUP_FILTERS)

  try:
    total_count, rows = crash_stats.get(
        query=query,
        group_query=group_query,
        offset=(page - 1) * PAGE_SIZE,
        limit=PAGE_SIZE)
  except ValueError:
    # Bad filter combinations surface as ValueError from the stats backend.
    raise helpers.EarlyExitException('Invalid filters', 400)

  attach_testcases(rows)

  helpers.log('CrashStats', helpers.VIEW_OPERATION)

  result = {
      'totalPages': (total_count // PAGE_SIZE) + 1,
      'page': page,
      'pageSize': PAGE_SIZE,
      'items': rows,
      'totalCount': total_count
  }
  return result, params
def mark(testcase, security, severity):
  """Mark the testcase as security-related (or not) and notify bisection.

  Args:
    testcase: The testcase to update.
    security: Whether the testcase is security-related.
    severity: Optional explicit severity. If falsy while |security| is set,
        a severity is derived from the crash details.
  """
  testcase.security_flag = security
  if security:
    if not severity:
      # Derive a severity from the crash signature when none was supplied.
      severity = severity_analyzer.get_security_severity(
          testcase.crash_type, testcase.crash_stacktrace, testcase.job_type,
          bool(testcase.gestures))

    testcase.security_severity = severity
    # NOTE(review): bisection is requested before the put() below — confirm
    # request_bisection does not rely on the flags being persisted yet.
    bisection.request_bisection(testcase)
  else:
    # The bisection infrastructure only cares about security bugs. If this was
    # marked as non-security, mark it as invalid.
    bisection.notify_bisection_invalid(testcase)

  testcase.put()

  helpers.log(
      f'Set security flags on testcase {testcase.key.id()} to {security}.',
      helpers.MODIFY_OPERATION)
def update_issue(testcase, issue_id, needs_summary_update):
  """Associate (or update) an existing issue with the testcase.

  Args:
    testcase: The testcase to attach the issue to.
    issue_id: Id of the existing issue.
    needs_summary_update: Whether to refresh the issue title from the
        testcase details.

  Raises:
    helpers.EarlyExitException: If the id is not numeric, the issue cannot
        be fetched, the issue is closed, or the testcase is not a crash.
  """
  issue_id = helpers.cast(issue_id, int,
                          'Issue ID (%s) is not a number!' % issue_id)
  issue_tracker = helpers.get_issue_tracker_for_testcase(testcase)

  issue = helpers.get_or_exit(lambda: issue_tracker.get_issue(issue_id),
                              'Issue (id=%d) is not found!' % issue_id,
                              'Failed to get the issue (id=%s).' % issue_id,
                              Exception)

  if not issue.is_open:
    raise helpers.EarlyExitException(
        ('The issue (%d) is already closed and further updates are not'
         ' allowed. Please file a new issue instead!') % issue_id, 400)

  if not testcase.is_crash():
    raise helpers.EarlyExitException(
        'This is not a crash testcase, so issue update is not applicable.',
        400)

  issue_comment = data_handler.get_issue_description(testcase,
                                                     helpers.get_user_email())
  if needs_summary_update:
    issue.title = data_handler.get_issue_summary(testcase)

  # Apply the tracker policy's labels for existing issues.
  policy = issue_tracker_policy.get(issue_tracker.project)
  properties = policy.get_existing_issue_properties()
  for label in properties.labels:
    for result in issue_filer.apply_substitutions(policy, label, testcase):
      issue.labels.add(result)

  issue.save(new_comment=issue_comment)

  testcase.bug_information = str(issue_id)
  testcase.put()

  data_handler.update_group_bug(testcase.group_id)

  # Fixed log format: the old '%sd' rendered ids as e.g. 'Updated issue 123d'.
  helpers.log('Updated issue %s' % issue_id, helpers.MODIFY_OPERATION)
def update_issue(testcase, issue_id, needs_summary_update):
  """Associate (or update) an existing issue with the testcase.

  Args:
    testcase: The testcase to attach the issue to.
    issue_id: Id of the existing issue.
    needs_summary_update: Whether to refresh the issue summary from the
        testcase details.

  Raises:
    helpers.EarlyExitException: If the id is not numeric, the issue cannot
        be fetched, or the issue is already closed.
  """
  issue_id = helpers.cast(issue_id, int,
                          'Issue ID (%s) is not a number!' % issue_id)
  itm = helpers.get_issue_tracker_manager(testcase)

  issue = helpers.get_or_exit(lambda: itm.get_issue(issue_id),
                              'Issue (id=%d) is not found!' % issue_id,
                              'Failed to get the issue (id=%s).' % issue_id,
                              Exception)

  if not issue.open:
    raise helpers.EarlyExitException(
        ('The issue (%d) is already closed and further updates are not'
         ' allowed. Please file a new issue instead!') % issue_id, 400)

  # Create issue parameters.
  issue.comment = data_handler.get_issue_description(testcase,
                                                     helpers.get_user_email())
  issue_summary = data_handler.get_issue_summary(testcase)

  # NULL states leads to unhelpful summaries, so do not update in that case.
  if needs_summary_update and testcase.crash_state != 'NULL':
    issue.summary = issue_summary

  # Add label on memory tool used.
  issue_filer.add_memory_tool_label_if_needed(issue, testcase)

  # Add view restrictions for internal job types.
  issue_filer.add_view_restrictions_if_needed(issue, testcase)

  # Don't enforce security severity label on an existing issue.
  itm.save(issue)

  testcase.bug_information = str(issue_id)
  testcase.put()

  data_handler.update_group_bug(testcase.group_id)

  # Fixed log format: the old '%sd' rendered ids as e.g. 'Updated issue 123d'.
  helpers.log('Updated issue %s' % issue_id, helpers.MODIFY_OPERATION)
def get(self):
  """Render the jobs page."""
  helpers.log('Jobs', helpers.VIEW_OPERATION)

  templates = list(data_types.JobTemplate.query().order(
      data_types.JobTemplate.name))
  queues = get_queues()
  result, params = get_results(self)

  field_values = {
      'csrf_token': form.generate_csrf_token(),
      'queues': queues,
      'update_job_url': '/update-job',
      'update_job_template_url': '/update-job-template',
      'upload_info': gcs.prepare_blob_upload()._asdict(),
  }
  self.render(
      'jobs.html', {
          'result': result,
          'templates': templates,
          'fieldValues': field_values,
          'params': params,
      })
def get(self):
  """Render the configuration page."""
  permissions_query = data_types.ExternalUserPermission.query().order(
      data_types.ExternalUserPermission.entity_kind,
      data_types.ExternalUserPermission.entity_name,
      data_types.ExternalUserPermission.email)
  external_user_permissions = list(permissions_query)

  field_values = {
      'csrf_token': form.generate_csrf_token(),
      'user_permission_entity_kinds': USER_PERMISSION_ENTITY_KINDS,
      'user_permission_auto_cc_types': USER_PERMISSION_AUTO_CC_TYPES,
      'add_permission_url': '/add-external-user-permission',
      'delete_permission_url': '/delete-external-user-permission',
  }
  template_values = {
      'config': db_config.get(),
      'permissions': external_user_permissions,
      'fieldValues': field_values,
  }

  helpers.log('Configuration', helpers.VIEW_OPERATION)
  self.render('configuration.html', template_values)
def get(self):
  """Render the configuration page."""
  # Permissions are ordered for stable display: kind, then name, then email.
  external_user_permissions = list(
      data_types.ExternalUserPermission.query().order(
          data_types.ExternalUserPermission.entity_kind,
          data_types.ExternalUserPermission.entity_name,
          data_types.ExternalUserPermission.email,
      ))

  field_values = {
      "csrf_token": form.generate_csrf_token(),
      "user_permission_entity_kinds": USER_PERMISSION_ENTITY_KINDS,
      "user_permission_auto_cc_types": USER_PERMISSION_AUTO_CC_TYPES,
      "add_permission_url": "/add-external-user-permission",
      "delete_permission_url": "/delete-external-user-permission",
  }
  template_values = {
      "config": db_config.get(),
      "permissions": external_user_permissions,
      "fieldValues": field_values,
  }

  helpers.log("Configuration", helpers.VIEW_OPERATION)
  self.render("configuration.html", template_values)
def get_results():
  """Build the result dict and effective filter params for the jobs page."""
  # Jobs are listed alphabetically by name.
  query = datastore_query.Query(data_types.Job)
  query.order('name', is_desc=False)

  params = dict(request.iterparams())
  filters.add(query, params, FILTERS)

  page = helpers.cast(request.get('page', 1), int, "'page' is not an int.")
  items, total_pages, total_items, has_more = query.fetch_page(
      page=page, page_size=PAGE_SIZE, projection=None, more_limit=MORE_LIMIT)

  helpers.log('Jobs', helpers.VIEW_OPERATION)

  result = {
      'hasMore': has_more,
      'items': [_job_to_dict(item) for item in items],
      'page': page,
      'pageSize': PAGE_SIZE,
      'totalItems': total_items,
      'totalPages': total_pages,
  }
  return result, params
def post(self):
  """Create or update a job template from the posted form values."""
  name = self.request.get('name')
  if not name:
    raise helpers.EarlyExitException('Please give this template a name!', 400)

  if not data_types.Job.VALID_NAME_REGEX.match(name):
    raise helpers.EarlyExitException(
        'Template name can only contain letters, numbers, dashes and '
        'underscores.', 400)

  environment_string = self.request.get('environment_string')
  if not environment_string:
    raise helpers.EarlyExitException(
        'No environment string provided for job template.', 400)

  # Reuse an existing template with this name if one exists.
  template = data_types.JobTemplate.query(
      data_types.JobTemplate.name == name).get()
  if template is None:
    template = data_types.JobTemplate()

  template.name = name
  template.environment_string = environment_string
  template.put()

  helpers.log('Template created %s' % name, helpers.MODIFY_OPERATION)

  template_values = {
      'title': 'Success',
      'message': ('Template %s is successfully updated. '
                  'Redirecting back to jobs page...') % name,
      'redirect_url': '/jobs',
  }
  self.render('message.html', template_values)
def post(self):
  """Delete the job named by the request key, plus all references to it."""
  key = helpers.get_integer_key(self.request)
  job = ndb.Key(data_types.Job, key).get()
  if job is None:
    raise helpers.EarlyExitException('Job not found.', 400)

  # Remove the job from every fuzzer's job list.
  for fuzzer in ndb_utils.get_all_from_model(data_types.Fuzzer):
    if job.name not in fuzzer.jobs:
      continue
    fuzzer.jobs.remove(job.name)
    fuzzer.put()

  # Remove any fuzzer-job mapping entities pointing at this job.
  mapping_query = data_types.FuzzerJob.query().filter(
      data_types.FuzzerJob.job == job.name)
  for mapping in ndb_utils.get_all_from_query(mapping_query):
    mapping.key.delete()

  # Finally, delete the job entity itself.
  job.key.delete()
  helpers.log('Deleted job %s' % job.name, helpers.MODIFY_OPERATION)

  self.redirect('/jobs')
def do_post(self):
  """Upload a testcase.

  Handles both a fresh file upload and re-upload of an existing testcase
  (when |testcaseId| is given without a file). Validates the job/target,
  enforces privilege restrictions, then either queues an archive unpack
  task (multiple testcases) or creates a single user-uploaded testcase.
  """
  testcase_id = self.request.get('testcaseId')
  uploaded_file = self.get_upload()
  if testcase_id and not uploaded_file:
    # Re-upload path: reuse the blob of an existing, accessible testcase.
    testcase = helpers.get_testcase(testcase_id)
    if not access.can_user_access_testcase(testcase):
      raise helpers.AccessDeniedException()

    # Use minimized testcase for upload (if available).
    key = (testcase.minimized_keys if testcase.minimized_keys and
           testcase.minimized_keys != 'NA' else testcase.fuzzed_keys)

    uploaded_file = blobs.get_blob_info(key)

    # Extract filename part from blob.
    uploaded_file.filename = os.path.basename(
        uploaded_file.filename.replace('\\', os.sep))

  job_type = self.request.get('job')
  if not job_type:
    raise helpers.EarlyExitException('Missing job name.', 400)

  # The job must both be well-formed and actually exist.
  if (not data_types.Job.VALID_NAME_REGEX.match(job_type) or
      not data_types.Job.query(data_types.Job.name == job_type).get()):
    raise helpers.EarlyExitException('Invalid job name.', 400)

  # Infer the engine from the job name; empty means a non-engine job.
  fuzzer_name = ''
  job_type_lowercase = job_type.lower()
  if 'libfuzzer' in job_type_lowercase:
    fuzzer_name = 'libFuzzer'
  elif 'afl' in job_type_lowercase:
    fuzzer_name = 'afl'

  target_name = self.request.get('target')
  # Engine jobs require a target; non-engine jobs must not have one.
  if not fuzzer_name and target_name:
    raise helpers.EarlyExitException(
        'Target name is not applicable to non-engine jobs (AFL, libFuzzer).',
        400)
  if fuzzer_name and not target_name:
    raise helpers.EarlyExitException(
        'Missing target name for engine job (AFL, libFuzzer).', 400)

  if (target_name and
      not data_types.Fuzzer.VALID_NAME_REGEX.match(target_name)):
    raise helpers.EarlyExitException('Invalid target name.', 400)

  fully_qualified_fuzzer_name = ''
  if fuzzer_name and target_name:
    fully_qualified_fuzzer_name, target_name = find_fuzz_target(
        fuzzer_name, target_name, job_type)
    if not fully_qualified_fuzzer_name:
      raise helpers.EarlyExitException('Target does not exist.', 400)

  if not access.has_access(
      need_privileged_access=False,
      job_type=job_type,
      fuzzer_name=(fully_qualified_fuzzer_name or fuzzer_name)):
    raise helpers.AccessDeniedException()

  # Optional upload parameters.
  multiple_testcases = bool(self.request.get('multiple'))
  http_flag = bool(self.request.get('http'))
  high_end_job = bool(self.request.get('highEnd'))
  bug_information = self.request.get('issue')
  crash_revision = self.request.get('revision')
  timeout = self.request.get('timeout')
  retries = self.request.get('retries')
  bug_summary_update_flag = bool(self.request.get('updateIssue'))
  additional_arguments = self.request.get('args')
  app_launch_command = self.request.get('cmd')
  platform_id = self.request.get('platform')

  testcase_metadata = self.request.get('metadata')
  if testcase_metadata:
    try:
      testcase_metadata = json.loads(testcase_metadata)
      if not isinstance(testcase_metadata, dict):
        # NOTE(review): this EarlyExitException is raised inside the try and
        # caught by the except below, so the user sees 'Invalid metadata
        # JSON.' rather than this message — confirm that is intended.
        raise helpers.EarlyExitException('Metadata is not a JSON object.', 400)
    except Exception:
      raise helpers.EarlyExitException('Invalid metadata JSON.', 400)

  archive_state = 0
  bundled = False
  file_path_input = ''
  email = helpers.get_user_email()

  # If we have a AFL or libFuzzer target, use that for arguments.
  # Launch command looks like
  # python launcher.py {testcase_path} {target_name}
  if target_name:
    additional_arguments = '%%TESTCASE%% %s' % target_name

  # Certain modifications such as app launch command, issue updates are only
  # allowed for privileged users.
  privileged_user = access.has_access(need_privileged_access=True)
  if not privileged_user:
    if bug_information or bug_summary_update_flag:
      raise helpers.EarlyExitException(
          'You are not privileged to update existing issues.', 400)

    need_privileged_access = utils.string_is_true(
        data_handler.get_value_from_job_definition(job_type,
                                                   'PRIVILEGED_ACCESS'))
    if need_privileged_access:
      raise helpers.EarlyExitException(
          'You are not privileged to run this job type.', 400)

    if app_launch_command:
      raise helpers.EarlyExitException(
          'You are not privileged to run arbitary launch commands.', 400)

    if testcase_metadata:
      raise helpers.EarlyExitException(
          'You are not privileged to set testcase metadata.', 400)

  # Non-numeric or missing revisions fall back to 0 (meaning "unknown").
  if crash_revision and crash_revision.isdigit():
    crash_revision = int(crash_revision)
  else:
    crash_revision = 0

  if bug_information and not bug_information.isdigit():
    raise helpers.EarlyExitException('Bug is not a number.', 400)

  # Timeout must be a positive number of seconds, capped at 120.
  if not timeout:
    timeout = 0
  elif not timeout.isdigit() or timeout == '0':
    raise helpers.EarlyExitException(
        'Testcase timeout must be a number greater than 0.', 400)
  else:
    timeout = int(timeout)
    if timeout > 120:
      raise helpers.EarlyExitException(
          'Testcase timeout may not be greater than 120 seconds.', 400)

  if retries:
    if retries.isdigit():
      retries = int(retries)
    else:
      retries = None

    if retries is None or retries > MAX_RETRIES:
      raise helpers.EarlyExitException(
          'Testcase retries must be a number less than %d.' % MAX_RETRIES, 400)
  else:
    retries = None

  # Gestures are passed as a Python-literal list.
  # NOTE(review): bare except silently maps any parse failure to no gestures.
  try:
    gestures = ast.literal_eval(self.request.get('gestures'))
  except:
    gestures = []
  if not gestures:
    gestures = []

  job_queue = tasks.queue_for_job(job_type, is_high_end=high_end_job)

  if uploaded_file is not None:
    # Strip characters that are unsafe in filenames/URLs.
    filename = ''.join(
        [x for x in uploaded_file.filename if x not in ' ;/?:@&=+$,{}|<>()\\'])
    key = str(uploaded_file.key())
    if archive.is_archive(filename):
      archive_state = data_types.ArchiveStatus.FUZZED

    if archive_state:
      if multiple_testcases:
        if testcase_metadata:
          raise helpers.EarlyExitException(
              'Testcase metadata not supported with multiple testcases.', 400)
        # Create a job to unpack an archive.
        metadata = data_types.BundledArchiveMetadata()
        metadata.blobstore_key = key
        metadata.timeout = timeout
        metadata.job_queue = job_queue
        metadata.job_type = job_type
        metadata.http_flag = http_flag
        metadata.archive_filename = filename
        metadata.uploader_email = email
        metadata.gestures = gestures
        metadata.crash_revision = crash_revision
        metadata.additional_arguments = additional_arguments
        metadata.bug_information = bug_information
        metadata.platform_id = platform_id
        metadata.app_launch_command = app_launch_command
        metadata.fuzzer_name = fuzzer_name
        metadata.overridden_fuzzer_name = fully_qualified_fuzzer_name
        metadata.fuzzer_binary_name = target_name
        metadata.put()

        tasks.add_task(
            'unpack',
            str(metadata.key.id()),
            job_type,
            queue=tasks.queue_for_job(job_type))

        # Create a testcase metadata object to show the user their upload.
        upload_metadata = data_types.TestcaseUploadMetadata()
        upload_metadata.timestamp = datetime.datetime.utcnow()
        upload_metadata.filename = filename
        upload_metadata.blobstore_key = key
        upload_metadata.original_blobstore_key = key
        upload_metadata.status = 'Pending'
        upload_metadata.bundled = True
        upload_metadata.uploader_email = email
        upload_metadata.retries = retries
        upload_metadata.bug_summary_update_flag = bug_summary_update_flag
        upload_metadata.put()

        helpers.log('Uploaded multiple testcases.', helpers.VIEW_OPERATION)
        self.render_json({'multiple': True})
        return

      # Single testcase inside an archive: find the file to launch.
      file_path_input = guess_input_file(uploaded_file, filename)
      if not file_path_input:
        raise helpers.EarlyExitException(
            ("Unable to detect which file to launch. The main file\'s name "
             'must contain either of %s.' % str(RUN_FILE_PATTERNS)), 400)
  else:
    raise helpers.EarlyExitException('Please select a file to upload.', 400)

  testcase_id = data_handler.create_user_uploaded_testcase(
      key, key, archive_state, filename, file_path_input, timeout, job_type,
      job_queue, http_flag, gestures, additional_arguments, bug_information,
      crash_revision, email, platform_id, app_launch_command, fuzzer_name,
      fully_qualified_fuzzer_name, target_name, bundled, retries,
      bug_summary_update_flag, additional_metadata=testcase_metadata)

  testcase = data_handler.get_testcase_by_id(testcase_id)
  issue = issue_tracker_utils.get_issue_for_testcase(testcase)
  if issue:
    # Leave a progress-tracking comment on the associated issue.
    report_url = data_handler.TESTCASE_REPORT_URL.format(
        domain=data_handler.get_domain(), testcase_id=testcase_id)

    comment = ('ClusterFuzz is analyzing your testcase. '
               'Developers can follow the progress at %s.' % report_url)
    issue.save(new_comment=comment)

  helpers.log('Uploaded testcase %s' % testcase_id, helpers.VIEW_OPERATION)
  self.render_json({'id': '%s' % testcase_id})
def test_view(self):
  """Log with VIEW_OPERATION and verify the emitted info record."""
  helpers.log('message', helpers.VIEW_OPERATION)
  self.mock.info.assert_called_once_with(
      'ClusterFuzz: %s (%s): %s.', helpers.VIEW_OPERATION, 'email', 'message')
def test_modify(self):
  """Log with MODIFY_OPERATION and verify the emitted info record."""
  helpers.log('message', helpers.MODIFY_OPERATION)
  self.mock.info.assert_called_once_with(
      'ClusterFuzz: %s (%s): %s.', helpers.MODIFY_OPERATION, 'email',
      'message')
def post(self):
  """Handle a post request.

  Persists all configuration fields submitted from the configuration page,
  using |previous_hash| for optimistic-concurrency conflict detection.
  """
  config = db_config.get()
  if not config:
    config = data_types.Config()

  # Reject the update if another change landed since this form was loaded.
  previous_hash = self.request.get('previous_hash')
  if config.previous_hash and config.previous_hash != previous_hash:
    raise helpers.EarlyExitException(
        'Your change conflicts with another configuration update. '
        'Please refresh and try again.', 500)

  # Read every configuration field from the form.
  build_apiary_service_account_private_key = self.request.get(
      'build_apiary_service_account_private_key')
  bug_report_url = self.request.get('bug_report_url')
  client_credentials = self.request.get('client_credentials')
  jira_url = self.request.get('jira_url')
  jira_credentials = self.request.get('jira_credentials')
  component_repository_mappings = self.request.get(
      'component_repository_mappings')
  contact_string = self.request.get('contact_string')
  documentation_url = self.request.get('documentation_url')
  github_credentials = self.request.get('github_credentials')
  platform_group_mappings = self.request.get('platform_group_mappings')
  privileged_users = self.request.get('privileged_users')
  blacklisted_users = self.request.get('blacklisted_users')
  relax_security_bug_restrictions = self.request.get(
      'relax_security_bug_restrictions')
  relax_testcase_restrictions = self.request.get('relax_testcase_restrictions')
  reproduce_tool_client_id = self.request.get('reproduce_tool_client_id')
  reproduce_tool_client_secret = self.request.get(
      'reproduce_tool_client_secret')
  reproduction_help_url = self.request.get('reproduction_help_url')
  test_account_email = self.request.get('test_account_email')
  test_account_password = self.request.get('test_account_password')
  wifi_ssid = self.request.get('wifi_ssid')
  wifi_password = self.request.get('wifi_password')
  sendgrid_api_key = self.request.get('sendgrid_api_key')
  sendgrid_sender = self.request.get('sendgrid_sender')

  # Copy the submitted values onto the config entity.
  config.build_apiary_service_account_private_key = (
      build_apiary_service_account_private_key)
  config.bug_report_url = bug_report_url
  config.client_credentials = client_credentials
  config.component_repository_mappings = component_repository_mappings
  config.contact_string = contact_string
  config.documentation_url = documentation_url
  config.github_credentials = github_credentials
  config.jira_credentials = jira_credentials
  config.jira_url = jira_url
  config.platform_group_mappings = platform_group_mappings
  config.privileged_users = privileged_users
  config.blacklisted_users = blacklisted_users
  config.relax_security_bug_restrictions = bool(
      relax_security_bug_restrictions)
  config.relax_testcase_restrictions = bool(relax_testcase_restrictions)
  config.reproduce_tool_client_id = reproduce_tool_client_id
  config.reproduce_tool_client_secret = reproduce_tool_client_secret
  config.reproduction_help_url = reproduction_help_url
  config.test_account_email = test_account_email
  config.test_account_password = test_account_password
  config.wifi_ssid = wifi_ssid
  config.wifi_password = wifi_password
  config.sendgrid_api_key = sendgrid_api_key
  config.sendgrid_sender = sendgrid_sender

  helpers.log('Configuration', helpers.MODIFY_OPERATION)

  # Before hashing the entity, we must put it so that the internal maps are
  # updated.
  config.put()
  config.previous_hash = utils.entity_hash(config)

  # Second put persists the freshly computed hash for the next update.
  config.put()

  template_values = {
      'title': 'Success',
      'message': ('Configuration is successfully updated. '
                  'Redirecting to the configuration page...'),
      'redirect_url': '/configuration',
  }
  self.render('message.html', template_values)
def get_testcase_detail(testcase):
  """Get testcase detail for rendering the testcase detail page.

  Args:
    testcase: a data_types.Testcase-like entity; its fields, metadata and
      associated stacktraces are read (metadata's 'build_url' entry is also
      rewritten in place, see below).

  Returns:
    A dict of template values consumed by the testcase detail page.
  """
  config = db_config.get()
  crash_address = testcase.crash_address
  crash_state = testcase.crash_state
  crash_state_lines = crash_state.strip().splitlines()
  crash_type = data_handler.get_crash_type_string(testcase)
  reproduction_help_url = data_handler.get_reproduction_help_url(
      testcase, config)
  # External users are those without access to this testcase's job type.
  external_user = not access.has_access(job_type=testcase.job_type)
  issue_url = issue_tracker_utils.get_issue_url(testcase)
  metadata = testcase.get_metadata()
  original_testcase_size = _get_blob_size_string(testcase.fuzzed_keys)
  minimized_testcase_size = _get_blob_size_string(testcase.minimized_keys)
  has_issue_tracker = bool(data_handler.get_issue_tracker_name())

  # Regression range: empty means the task hasn't finished; the sentinel
  # string 'NA' means not applicable; anything else is a revision range.
  if not testcase.regression:
    regression = 'Pending'
  elif testcase.regression == 'NA':
    regression = 'NA'
  else:
    regression = _get_revision_range_html_from_string(testcase.job_type,
                                                      testcase.regression)

  # Fixed status mirrors the regression logic, with an extra 'Yes' sentinel
  # (fixed, but no revision range recorded). Only an actual revision-range
  # value produces a fixed_full link.
  fixed_full = None
  if 'progression_pending' in metadata:
    fixed = 'Pending'
  elif not testcase.fixed:
    fixed = 'NO'
  elif testcase.fixed == 'NA':
    fixed = 'NA'
  elif testcase.fixed == 'Yes':
    fixed = 'YES'
  else:
    fixed = 'YES'
    fixed_full = _get_revision_range_html_from_string(testcase.job_type,
                                                      testcase.fixed)

  # Fall back to the crash revision when no explicit last-tested revision
  # was recorded in metadata.
  last_tested = None
  last_tested_revision = (
      metadata.get('last_tested_revision') or testcase.crash_revision)
  if last_tested_revision:
    last_tested = _get_revision_range_html(testcase.job_type,
                                           last_tested_revision)

  # Primary crash stacktrace: fetch, filter against component revisions,
  # then convert to annotated display lines and a short preview.
  crash_revision = testcase.crash_revision
  crash_revisions_dict = revisions.get_component_revisions_dict(
      crash_revision, testcase.job_type)
  crash_stacktrace = data_handler.get_stacktrace(testcase)
  crash_stacktrace = filter_stacktrace(crash_stacktrace, testcase.crash_type,
                                       crash_revisions_dict)
  crash_stacktrace = convert_to_lines(crash_stacktrace, crash_state_lines,
                                      crash_type)
  crash_stacktrace_preview_lines = _preview_stacktrace(crash_stacktrace)

  # Second stacktrace (same pipeline, different stored attribute/revision).
  second_crash_stacktrace_revision = metadata.get(
      'second_crash_stacktrace_revision')
  second_crash_stacktrace_revisions_dict = (
      revisions.get_component_revisions_dict(second_crash_stacktrace_revision,
                                             testcase.job_type))
  second_crash_stacktrace = data_handler.get_stacktrace(
      testcase, stack_attribute='second_crash_stacktrace')
  second_crash_stacktrace = filter_stacktrace(
      second_crash_stacktrace, testcase.crash_type,
      second_crash_stacktrace_revisions_dict)
  second_crash_stacktrace = convert_to_lines(second_crash_stacktrace,
                                             crash_state_lines, crash_type)
  second_crash_stacktrace_preview_lines = _preview_stacktrace(
      second_crash_stacktrace)

  # Last-tested stacktrace (same pipeline again).
  last_tested_crash_revision = metadata.get('last_tested_crash_revision')
  last_tested_crash_revisions_dict = revisions.get_component_revisions_dict(
      last_tested_crash_revision, testcase.job_type)
  last_tested_crash_stacktrace = data_handler.get_stacktrace(
      testcase, stack_attribute='last_tested_crash_stacktrace')
  last_tested_crash_stacktrace = filter_stacktrace(
      last_tested_crash_stacktrace, testcase.crash_type,
      last_tested_crash_revisions_dict)
  last_tested_crash_stacktrace = convert_to_lines(last_tested_crash_stacktrace,
                                                  crash_state_lines, crash_type)
  last_tested_crash_stacktrace_preview_lines = _preview_stacktrace(
      last_tested_crash_stacktrace)

  privileged_user = access.has_access(need_privileged_access=True)

  # Fix build url link. |storage.cloud.google.com| takes care of using the
  # right set of authentication credentials needed to access the link.
  # NOTE: this mutates the metadata dict that is also returned below.
  if 'build_url' in metadata:
    metadata['build_url'] = metadata['build_url'].replace(
        'gs://', 'https://storage.cloud.google.com/')

  # A testcase needs a page refresh while it is Pending, or while any
  # follow-up task (blame/impact/minimize/progression/regression/stack) is
  # still outstanding on a Processed or Duplicate testcase.
  pending_blame_task = (
      testcase.has_blame() and 'blame_pending' in metadata and
      metadata['blame_pending'])
  pending_impact_task = (
      testcase.has_impacts() and not testcase.is_impact_set_flag)
  pending_minimize_task = not testcase.minimized_keys
  pending_progression_task = ('progression_pending' in metadata and
                              metadata['progression_pending'])
  pending_regression_task = not testcase.regression
  pending_stack_task = testcase.last_tested_crash_stacktrace == 'Pending'
  needs_refresh = (
      testcase.status == 'Pending' or
      ((testcase.status == 'Processed' or testcase.status == 'Duplicate') and
       (pending_blame_task or pending_impact_task or pending_minimize_task or
        pending_progression_task or pending_regression_task or
        pending_stack_task)))

  if data_types.SecuritySeverity.is_valid(testcase.security_severity):
    security_severity = severity_analyzer.severity_to_string(
        testcase.security_severity)
  else:
    security_severity = None

  auto_delete_timestamp = None
  auto_close_timestamp = None

  # Unreproducible (one-time) crashers get an auto-delete or auto-close
  # deadline measured from their last observed crash time.
  if testcase.one_time_crasher_flag:
    last_crash_time = (
        crash_stats.get_last_crash_time(testcase) or testcase.timestamp)

    # Set auto-delete timestamp for unreproducible testcases with
    # no associated bug.
    if not testcase.bug_information:
      auto_delete_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_NO_BUG_DEADLINE))

    # Set auto-close timestamp for unreproducible testcases with
    # an associated bug.
    if testcase.open and testcase.bug_information:
      auto_close_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE))

  # Display string is expected to look like 'Label: value' — split once on
  # ':' for the label, take the remainder's second piece for the value.
  memory_tool_display_string = environment.get_memory_tool_display_string(
      testcase.job_type)
  memory_tool_display_label = memory_tool_display_string.split(':')[0]
  memory_tool_display_value = memory_tool_display_string.split(':')[1].strip()

  helpers.log('Testcase %s' % testcase.key.id(), helpers.VIEW_OPERATION)
  return {
      'id': testcase.key.id(),
      'crash_type': crash_type,
      'crash_address': crash_address,
      'crash_state': crash_state,  # Used by reproduce tool.
      'crash_state_lines': crash_state_lines,
      'crash_revision': testcase.crash_revision,
      'csrf_token': form.generate_csrf_token(),
      'external_user': external_user,
      'footer': testcase.comments,
      'fixed': fixed,
      'fixed_full': fixed_full,
      'issue_url': issue_url,
      'is_admin': auth.is_current_user_admin(),
      'metadata': metadata,
      'minimized_testcase_size': minimized_testcase_size,
      'needs_refresh': needs_refresh,
      'original_testcase_size': original_testcase_size,
      'privileged_user': privileged_user,
      'regression': regression,
      'crash_stacktrace': {
          'lines':
              crash_stacktrace,
          'preview_lines':
              crash_stacktrace_preview_lines,
          'revision':
              revisions.get_real_revision(
                  crash_revision, testcase.job_type, display=True)
      },
      'second_crash_stacktrace': {
          'lines':
              second_crash_stacktrace,
          'preview_lines':
              second_crash_stacktrace_preview_lines,
          'revision':
              revisions.get_real_revision(
                  second_crash_stacktrace_revision,
                  testcase.job_type,
                  display=True)
      },
      'last_tested_crash_stacktrace': {
          'lines':
              last_tested_crash_stacktrace,
          'preview_lines':
              last_tested_crash_stacktrace_preview_lines,
          'revision':
              revisions.get_real_revision(
                  last_tested_crash_revision, testcase.job_type, display=True)
      },
      'security_severity': security_severity,
      'security_severities': data_types.SecuritySeverity.list(),
      'stats': {
          'min_hour': crash_stats.get_min_hour(),
          'max_hour': crash_stats.get_max_hour(),
      },
      'suspected_cls': _parse_suspected_cls(metadata.get('predator_result')),
      'testcase': testcase,
      'timestamp': utils.utc_datetime_to_timestamp(testcase.timestamp),
      'show_blame': testcase.has_blame(),
      'show_impact': testcase.has_impacts(),
      'impacts_production': testcase.impacts_production(),
      'find_similar_issues_options': FIND_SIMILAR_ISSUES_OPTIONS,
      'auto_delete_timestamp': auto_delete_timestamp,
      'auto_close_timestamp': auto_close_timestamp,
      'memory_tool_display_label': memory_tool_display_label,
      'memory_tool_display_value': memory_tool_display_value,
      'last_tested': last_tested,
      'is_admin_or_not_oss_fuzz': is_admin_or_not_oss_fuzz(),
      'has_issue_tracker': has_issue_tracker,
      'reproduction_help_url': reproduction_help_url,
      'is_local_development': environment.is_running_on_app_engine_development(),
  }
def apply_fuzzer_changes(self, fuzzer, upload_info):
  """Apply changes to a fuzzer.

  Validates the uploaded archive (if any), updates the fuzzer entity from
  the request's form fields, bumps its revision, persists it, refreshes
  fuzzer-job mappings, and redirects back to the fuzzers page.
  Raises helpers.EarlyExitException(400) on a bad archive type or a
  missing executable path.
  """
  # Only well-known archive formats are accepted for fuzzer uploads.
  if upload_info and not archive.is_archive(upload_info.filename):
    raise helpers.EarlyExitException(
        'Sorry, only zip, tgz, tar.gz, tbz, and tar.bz2 archives are '
        'allowed!', 400)

  if fuzzer.builtin:
    # Builtin fuzzers carry no user-provided binaries.
    executable_path = None
    launcher_script = None
  else:
    executable_path = self._get_executable_path(upload_info)
    launcher_script = self._get_launcher_script(upload_info)

    # Executable path is required for non-builtin fuzzers and if it is not
    # already set.
    if not (fuzzer.executable_path or executable_path):
      raise helpers.EarlyExitException(
          'Please enter the path to the executable, or if the archive you '
          'uploaded is less than 16MB, ensure that the executable file has '
          '"run" in its name.', 400)

  # Save the fuzzer file metadata (only when a new archive was uploaded).
  if upload_info:
    fuzzer.filename = upload_info.filename
    fuzzer.blobstore_key = str(upload_info.key())
    fuzzer.file_size = utils.get_size_string(upload_info.size)

  # Copy over form fields.
  fuzzer.jobs = self.request.get('jobs', [])
  fuzzer.timeout = self._get_integer_value('timeout')
  fuzzer.max_testcases = self._get_integer_value('max_testcases')
  fuzzer.external_contribution = bool(
      self.request.get('external_contribution', False))
  fuzzer.differential = bool(self.request.get('differential', False))
  fuzzer.additional_environment_string = self.request.get(
      'additional_environment_string')
  fuzzer.data_bundle_name = self.request.get('data_bundle_name')

  fuzzer.revision += 1
  fuzzer.source = helpers.get_user_email()
  fuzzer.timestamp = datetime.datetime.utcnow()

  # Reset stale run artifacts; they belonged to the previous revision.
  fuzzer.result = None
  fuzzer.sample_testcase = None
  fuzzer.console_output = None

  # Update only if a new archive is provided.
  if executable_path:
    fuzzer.executable_path = executable_path

  # Optional. Also, update only if a new archive is provided and contains a
  # launcher script.
  if launcher_script:
    fuzzer.launcher_script = launcher_script

  fuzzer.put()

  fuzzer_selection.update_mappings_for_fuzzer(fuzzer)
  helpers.log('Uploaded fuzzer %s.' % fuzzer.name, helpers.MODIFY_OPERATION)
  self.redirect('/fuzzers')