def create_project_settings(project, info, service_account):
  """Set up settings for ClusterFuzz (such as CPU distribution)."""
  key = ndb.Key(data_types.OssFuzzProject, project)
  oss_fuzz_project = key.get()

  # Expecting to run a blackbox fuzzer, so use high end hosts.
  is_high_end = info.get('fuzzing_engines') == ['none']
  ccs = ccs_from_info(info)

  if oss_fuzz_project:
    if oss_fuzz_project.service_account != service_account['email']:
      oss_fuzz_project.service_account = service_account['email']
      oss_fuzz_project.put()

    if oss_fuzz_project.high_end != is_high_end:
      oss_fuzz_project.high_end = is_high_end
      oss_fuzz_project.put()

    if oss_fuzz_project.ccs != ccs:
      oss_fuzz_project.ccs = ccs
      oss_fuzz_project.put()
  else:
    data_types.OssFuzzProject(
        id=project,
        name=project,
        high_end=is_high_end,
        service_account=service_account['email'],
        ccs=ccs).put()
def post(self):
  """Handle a post request."""
  key = helpers.get_integer_key(self.request)
  data_bundle = ndb.Key(data_types.DataBundle, key).get()
  if not data_bundle:
    raise helpers.EarlyExitException('Corpus not found', 400)

  affected_fuzzers = data_types.Fuzzer.query(
      data_types.Fuzzer.data_bundle_name == data_bundle.name)
  for fuzzer in affected_fuzzers:
    fuzzer.data_bundle_name = None
    fuzzer.put()

  data_bundle.key.delete()

  template_values = {
      'title': 'Success',
      'message': ('Corpus %s is successfully deleted. '
                  'Redirecting back to corpora page...') % data_bundle.name,
      'redirect_url': '/corpora',
  }
  self.render('message.html', template_values)
def get_fuzz_target(self):
  """Get the associated FuzzTarget entity for this test case."""
  name = self.actual_fuzzer_name()
  if not name:
    return None

  return ndb.Key(FuzzTarget, name).get()
def get_task_status(name, create_if_needed=False):
  """Return the TaskStatus object with the given name."""
  metadata = ndb.Key(data_types.TaskStatus, name).get()
  if not metadata and create_if_needed:
    metadata = data_types.TaskStatus(id=name)

  return metadata
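# Usage sketch (illustrative, not from the original source): recording that a
# task has started. The field names (bot_name, status, time) match those
# checked by the TaskStatus assertions in the test further below; the literal
# 'started' status value is an assumption.
def _example_mark_task_started(task_name, bot_name, now):
  task_status = get_task_status(task_name, create_if_needed=True)
  task_status.bot_name = bot_name
  task_status.status = 'started'  # Assumed status value.
  task_status.time = now
  task_status.put()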
def is_current_user_admin():
  """Returns whether or not the current logged in user is an admin."""
  user = get_current_user()
  if not user:
    return False

  key = ndb.Key(data_types.Admin, user.email)
  return bool(key.get())
def get_query(*args, **kwargs):
  """Mock query."""
  query = original_query(*args, **kwargs)

  item = mock.MagicMock()
  item.key = ndb.Key(TestDatastoreModel, len(self.queries))

  self.queries.append([query])
  return QueryWrapper(query, [item], self.queries[-1])
def _get_root_cert(project_name):
  """Get the root TLS cert for connecting to the worker."""
  key = ndb.Key(data_types.WorkerTlsCert, project_name)
  tls_cert = key.get()
  if not tls_cert:
    return None

  assert tls_cert.cert_contents, 'Cert contents should not be empty.'
  return tls_cert.cert_contents
def post(self):
  """Handle a post request."""
  key = helpers.get_integer_key(self.request)
  fuzzer = ndb.Key(data_types.Fuzzer, key).get()
  if not fuzzer:
    raise helpers.EarlyExitException('Fuzzer not found.', 400)

  upload_info = self.get_upload()
  self.apply_fuzzer_changes(fuzzer, upload_info)
def get_testcase_by_id(testcase_id):
  """Return the testcase with the given id.

  Raises InvalidTestcaseError if the id is invalid or no such testcase exists.
  """
  if not testcase_id or not str(testcase_id).isdigit() or int(testcase_id) == 0:
    raise errors.InvalidTestcaseError

  testcase = ndb.Key(data_types.Testcase, int(testcase_id)).get()
  if not testcase:
    raise errors.InvalidTestcaseError

  return testcase
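# Usage sketch (illustrative, not from the original source): callers handle
# errors.InvalidTestcaseError rather than checking for None, since the
# function raises for bad or missing ids.
def _example_lookup_testcase(testcase_id):
  try:
    return get_testcase_by_id(testcase_id)
  except errors.InvalidTestcaseError:
    return None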
def update_job_weights():
  """Update job weights."""
  for job in data_types.Job.query():
    multiplier = DEFAULT_MULTIPLIER
    if environment.is_engine_fuzzer_job(job.name):
      targets_count = ndb.Key(data_types.FuzzTargetsCount, job.name).get()
      if targets_count:
        multiplier = targets_count.count

    update_job_weight(job.name, multiplier)
def is_current_user_admin():
  """Returns whether or not the current logged in user is an admin."""
  if environment.is_local_development():
    return True

  user = get_current_user()
  if not user:
    return False

  key = ndb.Key(data_types.Admin, user.email)
  return bool(key.get())
def get_coverage_information(fuzzer_name, date, create_if_needed=False):
  """Get coverage information, or create if it doesn't exist."""
  coverage_info = ndb.Key(
      data_types.CoverageInformation,
      data_types.coverage_information_key(fuzzer_name, date)).get()

  if not coverage_info and create_if_needed:
    coverage_info = data_types.CoverageInformation(
        fuzzer=fuzzer_name, date=date)

  return coverage_info
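# Usage sketch (illustrative, not from the original source): a freshly created
# CoverageInformation entity has no key yet; the _pre_put_hook shown later
# derives it from (fuzzer, date) on put(), so a later lookup with the same
# arguments finds the entity. The edges_covered field name is an assumption.
def _example_record_coverage(fuzzer_name, date, edges_covered):
  coverage_info = get_coverage_information(
      fuzzer_name, date, create_if_needed=True)
  coverage_info.edges_covered = edges_covered  # Assumed field name.
  coverage_info.put()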
def get_or_create_project_info(self, project_name):
  """Get OSS-Fuzz CPU info by project name (or create a new one if it
  doesn't exist)."""
  key = ndb.Key(data_types.OssFuzzProjectInfo, project_name)
  project_info = key.get()
  if not project_info:
    project_info = data_types.OssFuzzProjectInfo(
        name=project_name, id=project_name)
    project_info.put()

  return project_info
def test_record_fuzz_target(self):
  """Test that record_fuzz_target works."""
  fuzz_task.record_fuzz_target('libFuzzer', 'child', 'job')
  fuzz_target = ndb.Key(data_types.FuzzTarget, 'libFuzzer_child').get()
  self.assertDictEqual({
      'binary': 'child',
      'engine': 'libFuzzer',
      'project': 'test-project',
  }, fuzz_target.to_dict())

  job_mapping = ndb.Key(data_types.FuzzTargetJob, 'libFuzzer_child/job').get()
  self.assertDictEqual({
      'fuzz_target_name': 'libFuzzer_child',
      'job': 'job',
      'engine': 'libFuzzer',
      'last_run': datetime.datetime(2018, 1, 1, 0, 0),
      'weight': 1.0,
  }, job_mapping.to_dict())

  self.assertEqual('libFuzzer_child', fuzz_target.fully_qualified_name())
  self.assertEqual('child', fuzz_target.project_qualified_name())
def get_or_create_host_worker_assignment(self, host_name, instance_num):
  """Get OSS-Fuzz host worker assignment (or create a new one if it
  doesn't exist)."""
  key_id = '%s-%d' % (host_name, instance_num)
  key = ndb.Key(data_types.HostWorkerAssignment, key_id)
  assignment = key.get()
  if not assignment:
    assignment = data_types.HostWorkerAssignment(
        host_name=host_name, instance_num=instance_num, id=key_id)
    assignment.put()

  return assignment
def post(self):
  """Handle a post request."""
  key = helpers.get_integer_key(self.request)
  fuzzer = ndb.Key(data_types.Fuzzer, key).get()
  if not fuzzer:
    raise helpers.EarlyExitException('Fuzzer not found.', 400)

  fuzzer_selection.update_mappings_for_fuzzer(fuzzer, mappings=[])
  fuzzer.key.delete()

  logs.log('Deleted fuzzer %s.' % fuzzer.name)
  self.redirect('/fuzzers')
def _get_host_worker_assignment():
  """Get the host worker assignment for the current host."""
  # This only needs to be called once before the host connects to the worker.
  # This is because the host->worker assignment algorithm should ensure that a
  # worker is reassigned only if it is also reimaged.
  #
  # If a worker is reimaged, then the host's connection state will be lost and
  # it will restart its run_bot.py instance to figure out which worker to
  # connect to again. We should never get into a case where worker
  # re-assignment happens without the worker also being reimaged.
  key = ndb.Key(data_types.HostWorkerAssignment,
                environment.get_value('BOT_NAME'))
  return key.get()
def update_job_weights():
  """Update job weights."""
  for job in data_types.Job.query():
    multiplier = DEFAULT_MULTIPLIER
    if environment.is_engine_fuzzer_job(job.name):
      targets_count = ndb.Key(data_types.FuzzTargetsCount, job.name).get()
      # If the count is 0, it may be due to a bad build or some other issue.
      # Use the default weight in that case to allow for recovery.
      if targets_count and targets_count.count:
        multiplier = targets_count.count

    update_job_weight(job.name, multiplier)
def release_lock(key_name, force_release=False, by_zone=True):
  """Release a lock for the given key name."""
  logs.log('Releasing lock for %s.' % key_name)
  bot_name = environment.get_value('BOT_NAME')

  if by_zone:
    key_name_with_zone = _get_key_name_with_lock_zone(key_name)
    if key_name_with_zone is None:
      logs.log_error('Failed to get zone while releasing %s.' % key_name)
      return

    key_name = key_name_with_zone

  lock_entity = ndb.Key(data_types.Lock, key_name).get()
  if lock_entity and (force_release or lock_entity.holder == bot_name):
    lock_entity.key.delete()
def query_testcase(testcase_id):
  """Start a query for an associated testcase."""
  if not testcase_id:
    return []

  return data_types.Testcase.query(
      data_types.Testcase.key == ndb.Key(data_types.Testcase,
                                         testcase_id)).iter(
          limit=1,
          projection=[
              data_types.Testcase.crash_type,
              data_types.Testcase.crash_state,
              data_types.Testcase.security_flag,
              data_types.Testcase.bug_information,
              data_types.Testcase.job_type,
              data_types.Testcase.fuzzer_name,
              data_types.Testcase.overridden_fuzzer_name,
          ])
def get_legacy_blob_info(blob_key):
  """Return legacy blob info."""
  legacy_blob_info = ndb.Key(BlobInfo, blob_key).get()
  if not legacy_blob_info:
    return None

  if legacy_blob_info.gs_object_name:
    return legacy_blob_info

  # Blobs which were stored before the move to GCS have an additional mapping
  # entry created by our migration jobs.
  blob_mapping = get_blob_mapping(blob_key)
  if not blob_mapping:
    raise BlobsException('Blob mapping not found.')

  legacy_blob_info.gs_object_name = blob_mapping.gcs_filename
  return legacy_blob_info
def update_project_cluster(self,
                           project,
                           project_info,
                           cluster,
                           cpu_count,
                           disk_size_gb=None):
  """Update cluster allocation for a project."""
  service_account = None
  tls_cert = None

  if cluster.worker:
    # If this cluster is for untrusted workers, use the project service
    # account.
    service_account = project.service_account
    tls_cert = ndb.Key(data_types.WorkerTlsCert, project.name).get()
    if not tls_cert:
      logging.warn('TLS certs not set up yet for %s.', project.name)
      return

  cluster_info = project_info.get_cluster_info(cluster.name)
  if not cluster_info:
    project_info.clusters.append(
        data_types.OssFuzzProjectInfo.ClusterInfo(
            cluster=cluster.name,
            gce_zone=cluster.gce_zone,
            cpu_count=cpu_count))
    cluster_info = project_info.clusters[-1]

  # Get a name that can be used for the instance template and instance group.
  resource_name = get_resource_name(cluster.name, project_info.name)

  def do_update():
    """Update the cluster and cpu count info."""
    self.update_cluster(
        cluster,
        resource_name,
        cpu_count,
        task_tag=project_info.name,
        disk_size_gb=disk_size_gb,
        service_account=service_account,
        tls_cert=tls_cert)

    cluster_info.cpu_count = cpu_count

  self.pending_updates.append(self.thread_pool.submit(do_update))
def _try_acquire_lock(key_name, expiration_time, holder):
  """Actual lock acquire that runs in a transaction."""
  lock_entity = ndb.Key(data_types.Lock, key_name).get()
  if lock_entity is None:
    # Lock wasn't held, try to acquire.
    lock_entity = data_types.Lock(
        id=key_name, expiration_time=expiration_time, holder=holder)
    lock_entity.put()
    return lock_entity

  if lock_entity.expiration_time <= datetime.datetime.utcnow():
    # Lock was expired, try to take over the lock.
    lock_entity.expiration_time = expiration_time
    lock_entity.holder = holder
    lock_entity.put()
    return lock_entity
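# Sketch of how the transactional helper above would typically be invoked
# (illustrative; the surrounding call and the lock lifetime are assumptions,
# not taken from the original source). ndb.transaction() runs the callback
# atomically, so two bots racing on the same key cannot both observe the lock
# as free.
def _example_acquire_lock(key_name, holder, lifetime_seconds=600):
  expiration_time = (
      datetime.datetime.utcnow() +
      datetime.timedelta(seconds=lifetime_seconds))
  lock_entity = ndb.transaction(
      lambda: _try_acquire_lock(key_name, expiration_time, holder))
  # An implicit None return above means the lock is held and not yet expired.
  return lock_entity is not None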
def test_run_command_progression(self):
  """Test run_command with a progression task."""
  commands.run_command('progression', '123', 'job')
  self.assertEqual(1, self.mock.progression_execute_task.call_count)
  self.mock.progression_execute_task.assert_called_with('123', 'job')

  # TaskStatus should indicate success.
  task_status_entities = list(data_types.TaskStatus.query())
  self.assertEqual(1, len(task_status_entities))

  task_status = task_status_entities[0]
  self.assertEqual(
      ndb.Key(data_types.TaskStatus, 'progression 123 job'), task_status.key)
  self.assertDictEqual({
      'bot_name': 'bot_name',
      'status': 'finished',
      'time': test_utils.CURRENT_TIME,
  }, task_status.to_dict())
def _update_lock_statistics(key_name,
                            acquires=0,
                            bails=0,
                            failed_acquires=0,
                            lost=0,
                            wait_time=0):
  """Update lock statistics."""
  shard_index = random.randint(1, NUM_STATISTICS_SHARDS)
  shard_key_name = '%s-%d' % (key_name, shard_index)

  try:
    lock_entity = ndb.Key(data_types.LockStatShard, shard_key_name).get()
    if not lock_entity:
      lock_entity = data_types.LockStatShard(id=shard_key_name)

    lock_entity.acquires += acquires
    lock_entity.bails += bails
    lock_entity.failed_acquires += failed_acquires
    lock_entity.lost += lost
    lock_entity.wait_time += wait_time
    lock_entity.put()
  except Exception:
    logs.log_error('Failed to update lock statistics.')
    return
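# Sketch of reading the sharded counters back (illustrative; this aggregation
# helper is an assumption, not part of the original source). Writes are spread
# across NUM_STATISTICS_SHARDS entities to avoid datastore contention, so a
# total is recovered by summing every shard for the key.
def _example_total_acquires(key_name):
  shard_keys = [
      ndb.Key(data_types.LockStatShard, '%s-%d' % (key_name, index))
      for index in range(1, NUM_STATISTICS_SHARDS + 1)
  ]
  return sum(
      shard.acquires for shard in ndb.get_multi(shard_keys) if shard)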
def post(self):
  """Handle a post request."""
  key = helpers.get_integer_key(self.request)
  job = ndb.Key(data_types.Job, key).get()
  if not job:
    raise helpers.EarlyExitException('Job not found.', 400)

  # Delete from fuzzers' jobs' list.
  for fuzzer in ndb_utils.get_all_from_model(data_types.Fuzzer):
    if job.name in fuzzer.jobs:
      fuzzer.jobs.remove(job.name)
      fuzzer.put()

  # Delete associated fuzzer-job mapping(s).
  query = data_types.FuzzerJob.query()
  query = query.filter(data_types.FuzzerJob.job == job.name)
  for mapping in ndb_utils.get_all_from_query(query):
    mapping.key.delete()

  # Delete job.
  job.key.delete()

  helpers.log('Deleted job %s' % job.name, helpers.MODIFY_OPERATION)
  self.redirect('/jobs')
def _pre_put_hook(self):
  """Pre-put hook."""
  self.key = ndb.Key(CoverageInformation,
                     coverage_information_key(self.fuzzer, self.date))
def _pre_put_hook(self):
  """Pre-put hook."""
  self.key = ndb.Key(
      FuzzTargetJob, fuzz_target_job_key(self.fuzz_target_name, self.job))
def _pre_put_hook(self):
  """Pre-put hook."""
  self.key = ndb.Key(FuzzTarget, self.fully_qualified_name())
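# Sketch showing the effect of the pre-put hooks above (illustrative usage,
# not from the original source): the key is derived from the entity's own
# fields on put(), so no explicit id needs to be supplied when storing.
def _example_store_fuzz_target():
  target = data_types.FuzzTarget(
      engine='libFuzzer', binary='child', project='test-project')
  target.put()
  # The hook set the key id to the fully qualified name, e.g. 'libFuzzer_child'.
  return ndb.Key(data_types.FuzzTarget, target.fully_qualified_name()).get()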
def _get_oss_fuzz_project(project_name):
  """Return the OssFuzzProject entity for the given project."""
  return ndb.Key(data_types.OssFuzzProject, project_name).get()