def test_sync_projects_update(self):
  """Testing sync_projects() updating a schedule."""
  scheduler_client = CloudSchedulerClient()

  with ndb.Client().context():
    # Seed the datastore with two projects; test2's schedule will change.
    Project(name='test1',
            schedule='0 8 * * *',
            project_yaml_contents='',
            dockerfile_contents='').put()
    Project(name='test2',
            schedule='0 9 * * *',
            project_yaml_contents='',
            dockerfile_contents='').put()

    metadata = {
        'test1': ProjectMetadata('0 8 * * *', '', ''),
        'test2': ProjectMetadata('0 7 * * *', '', '')
    }
    sync_projects(scheduler_client, metadata)

    actual_schedules = {
        project.name: project.schedule for project in Project.query()
    }
    self.assertEqual({
        'test1': '0 8 * * *',
        'test2': '0 7 * * *'
    }, actual_schedules)
def sync_projects(cloud_scheduler_client, projects):
  """Sync projects with cloud datastore.

  Performs three passes: deletes datastore entries (and their scheduler
  jobs) for projects that no longer exist, creates scheduler jobs and
  entries for new projects, and updates entries whose metadata changed.

  Args:
    cloud_scheduler_client: Cloud Scheduler API client.
    projects: dict mapping project name to its ProjectMetadata.
  """
  _delete_removed_projects(cloud_scheduler_client, projects)
  _create_new_projects(cloud_scheduler_client, projects)
  _update_existing_projects(cloud_scheduler_client, projects)


def _delete_removed_projects(cloud_scheduler_client, projects):
  """Delete scheduler jobs and entries for projects absent from |projects|."""
  for project in Project.query():
    if project.name in projects:
      continue
    logging.info('Deleting project %s', project.name)
    try:
      delete_scheduler(cloud_scheduler_client, project.name)
      # Delete the datastore entry only after the scheduler job is gone, so
      # a failed scheduler deletion is retried on the next sync.
      project.key.delete()
    except exceptions.GoogleAPICallError as error:
      logging.error('Scheduler deletion for %s failed with %s', project.name,
                    error)


def _create_new_projects(cloud_scheduler_client, projects):
  """Create scheduler jobs and datastore entries for new projects."""
  existing_projects = {project.name for project in Project.query()}
  for project_name in projects:
    if project_name in existing_projects:
      continue
    try:
      create_scheduler(cloud_scheduler_client, project_name,
                       projects[project_name].schedule)
      project_metadata = projects[project_name]
      Project(name=project_name,
              schedule=project_metadata.schedule,
              project_yaml_contents=project_metadata.project_yaml_contents,
              dockerfile_contents=project_metadata.dockerfile_contents).put()
    except exceptions.GoogleAPICallError as error:
      logging.error('Scheduler creation for %s failed with %s', project_name,
                    error)


def _update_existing_projects(cloud_scheduler_client, projects):
  """Update datastore entries (and schedules) whose metadata changed."""
  for project in Project.query():
    if project.name not in projects:
      continue
    logging.info('Setting up project %s', project.name)
    project_metadata = projects[project.name]
    project_changed = False
    if project.schedule != project_metadata.schedule:
      try:
        logging.info('Schedule changed.')
        update_scheduler(cloud_scheduler_client, project,
                         project_metadata.schedule)
        project.schedule = project_metadata.schedule
        project_changed = True
      except exceptions.GoogleAPICallError as error:
        logging.error('Updating scheduler for %s failed with %s',
                      project.name, error)
    if project.project_yaml_contents != project_metadata.project_yaml_contents:
      project.project_yaml_contents = project_metadata.project_yaml_contents
      project_changed = True
    if project.dockerfile_contents != project_metadata.dockerfile_contents:
      project.dockerfile_contents = project_metadata.dockerfile_contents
      project_changed = True
    # Only write back when something actually differs, to avoid no-op puts.
    if project_changed:
      project.put()
def sync_projects(cloud_scheduler_client, projects):
  """Sync projects with cloud datastore.

  Deletes entries for removed projects, creates scheduler jobs and entries
  for new projects (one fuzzing job and one coverage job each), and updates
  entries whose metadata changed.

  Args:
    cloud_scheduler_client: Cloud Scheduler API client.
    projects: dict mapping project name to its ProjectMetadata.
  """
  # Remove datastore entries (and their scheduler jobs) for projects that no
  # longer exist upstream.
  for project in Project.query():
    if project.name not in projects:
      delete_project(cloud_scheduler_client, project)

  existing_projects = {project.name for project in Project.query()}
  for project_name in projects:
    if project_name in existing_projects:
      continue
    try:
      # Each new project gets two scheduler jobs: fuzzing builds on the
      # per-project schedule, coverage builds on the fixed COVERAGE_SCHEDULE.
      create_scheduler(cloud_scheduler_client, project_name,
                       projects[project_name].schedule,
                       build_project.FUZZING_BUILD_TAG, FUZZING_BUILD_TOPIC)
      create_scheduler(cloud_scheduler_client, project_name, COVERAGE_SCHEDULE,
                       build_and_run_coverage.COVERAGE_BUILD_TAG,
                       COVERAGE_BUILD_TOPIC)
      project_metadata = projects[project_name]
      Project(name=project_name,
              schedule=project_metadata.schedule,
              project_yaml_contents=project_metadata.project_yaml_contents,
              dockerfile_contents=project_metadata.dockerfile_contents).put()
    except exceptions.GoogleAPICallError as error:
      logging.error('Scheduler creation for %s failed with %s', project_name,
                    error)

  # Update entries whose metadata changed.
  for project in Project.query():
    if project.name not in projects:
      continue
    logging.info('Setting up project %s', project.name)
    project_metadata = projects[project.name]
    project_changed = False
    if project.schedule != project_metadata.schedule:
      try:
        logging.info('Schedule changed.')
        # Only the fuzzing job follows the per-project schedule; the coverage
        # job keeps the fixed COVERAGE_SCHEDULE set above.
        update_scheduler(cloud_scheduler_client, project,
                         projects[project.name].schedule,
                         build_project.FUZZING_BUILD_TAG)
        project.schedule = project_metadata.schedule
        project_changed = True
      except exceptions.GoogleAPICallError as error:
        logging.error('Updating scheduler for %s failed with %s',
                      project.name, error)
    if project.project_yaml_contents != project_metadata.project_yaml_contents:
      project.project_yaml_contents = project_metadata.project_yaml_contents
      project_changed = True
    if project.dockerfile_contents != project_metadata.dockerfile_contents:
      project.dockerfile_contents = project_metadata.dockerfile_contents
      project_changed = True
    # Only write back when something actually differs.
    if project_changed:
      project.put()
def test_get_coverage_build_steps(self, mocked_url, mocked_corpora_steps,
                                  mocked_time):
  """Test for get_build_steps."""
  del mocked_url, mocked_corpora_steps, mocked_time
  datetime.datetime = test_utils.SpoofedDatetime

  project_yaml_contents = ('language: c++\n'
                           'sanitizers:\n'
                           ' - address\n'
                           'architectures:\n'
                           ' - x86_64\n')
  dockerfile_contents = 'test line'
  image_project = 'oss-fuzz'
  base_images_project = 'oss-fuzz-base'

  # Load the golden build steps this test compares against.
  expected_path = test_utils.get_test_data_file_path(
      'expected_coverage_build_steps.json')
  with open(expected_path) as expected_file:
    expected_coverage_build_steps = json.load(expected_file)

  with ndb.Client().context():
    Project(name='test-project',
            project_yaml_contents=project_yaml_contents,
            dockerfile_contents=dockerfile_contents).put()
    build_steps = get_build_steps('test-project', project_yaml_contents,
                                  dockerfile_contents.split('\n'),
                                  image_project, base_images_project)
    self.assertEqual(build_steps, expected_coverage_build_steps)
def test_sync_projects_create(self):
  """"Testing sync_projects() creating new schedule."""
  scheduler_client = CloudSchedulerClient()

  with ndb.Client().context():
    # Only test1 exists in datastore; test2 must be created by the sync.
    Project(name='test1',
            schedule='0 8 * * *',
            project_yaml_contents='',
            dockerfile_contents='').put()

    metadata = {
        'test1': ProjectMetadata('0 8 * * *', '', ''),
        'test2': ProjectMetadata('0 7 * * *', '', '')
    }
    sync_projects(scheduler_client, metadata)

    actual_schedules = {
        project.name: project.schedule for project in Project.query()
    }
    self.assertEqual({
        'test1': '0 8 * * *',
        'test2': '0 7 * * *'
    }, actual_schedules)

    # The new project should get both a fuzzing and a coverage scheduler job.
    self.assertCountEqual([
        {
            'name': 'projects/test-project/location/us-central1/jobs/'
                    'test2-scheduler-fuzzing',
            'pubsub_target': {
                'topic_name': 'projects/test-project/topics/request-build',
                'data': b'test2'
            },
            'schedule': '0 7 * * *'
        },
        {
            'name': 'projects/test-project/location/us-central1/jobs/'
                    'test2-scheduler-coverage',
            'pubsub_target': {
                'topic_name':
                    'projects/test-project/topics/request-coverage-build',
                'data': b'test2'
            },
            'schedule': '0 6 * * *'
        },
    ], scheduler_client.schedulers)
def get_project_data(project_name):
  """Retrieve project metadata from datastore."""
  project = Project.query(Project.name == project_name).get()
  if not project:
    raise RuntimeError(
        f'Project {project_name} not available in cloud datastore')
  return (project.project_yaml_contents, project.dockerfile_contents)
def get_project_data(project_name):
  """Retrieve project metadata from datastore.

  Args:
    project_name: Name of the project to look up.

  Returns:
    A (project_yaml_contents, dockerfile_lines) tuple, where
    dockerfile_lines is the Dockerfile contents split into lines.

  Raises:
    RuntimeError: If the project is not present in cloud datastore.
  """
  query = Project.query(Project.name == project_name)
  project = query.get()
  if not project:
    # f-string for consistency with the sibling get_project_data variant;
    # the resulting message is identical to the old str.format call.
    raise RuntimeError(
        f'Project {project_name} not available in cloud datastore')
  project_yaml_contents = project.project_yaml_contents
  dockerfile_lines = project.dockerfile_contents.split('\n')
  return (project_yaml_contents, dockerfile_lines)
def update_status(event, context):
  """Entry point for cloud function to update build statuses and badges.

  Args:
    event: Pub/Sub trigger event (unused).
    context: Cloud function context (unused).
  """
  del event, context  #unused
  with ndb.Client().context():
    # Refresh the fuzzing and coverage status files on GCS and get the
    # per-project results back.
    project_build_statuses = update_build_status(
        build_project.FUZZING_BUILD_TAG, status_filename='status.json')
    coverage_build_statuses = update_build_status(
        build_and_run_coverage.COVERAGE_BUILD_TAG,
        status_filename='status-coverage.json')

    # Badges are only updated for projects that have both a fuzzing and a
    # coverage status.
    for project in Project.query():
      if project.name not in project_build_statuses or project.name not in coverage_build_statuses:
        continue
      update_build_badges(project.name, project_build_statuses[project.name],
                          coverage_build_statuses[project.name])
def update_badges():
  """Update badges."""
  fuzzing_statuses = load_status_from_gcs(FUZZING_STATUS_FILENAME)
  coverage_statuses = load_status_from_gcs(COVERAGE_STATUS_FILENAME)

  futures = []
  with concurrent.futures.ThreadPoolExecutor(max_workers=32) as executor:
    with ndb.Client().context():
      for project in Project.query():
        name = project.name
        # Skip projects that lack either a fuzzing or a coverage status.
        if name not in fuzzing_statuses or name not in coverage_statuses:
          continue
        futures.append(
            executor.submit(update_build_badges, name, fuzzing_statuses[name],
                            coverage_statuses[name]))
    concurrent.futures.wait(futures)
def test_get_build_steps(self, mocked_url, mocked_time):
  """Test for get_build_steps."""
  del mocked_url, mocked_time
  datetime.datetime = SpoofedDatetime

  project_yaml_contents = ('language: c++\nsanitizers:\n - address\n'
                           'architectures:\n - x86_64\n')
  image_project = 'oss-fuzz'
  base_images_project = 'oss-fuzz-base'

  # Load the golden build steps this test compares against.
  expected_path = os.path.join(os.path.dirname(__file__),
                               'expected_build_steps.json')
  with open(expected_path) as expected_file:
    expected_build_steps = json.load(expected_file)

  with ndb.Client().context():
    Project(name='test-project',
            project_yaml_contents=project_yaml_contents,
            dockerfile_contents='test line').put()
    build_steps = get_build_steps('test-project', image_project,
                                  base_images_project)
    self.assertEqual(build_steps, expected_build_steps)
def request_coverage_build(event, context):
  """Entry point for coverage build cloud function."""
  del event, context  #unused
  with ndb.Client().context():
    credentials, image_project = google.auth.default()
    for project in Project.query():
      # A project's build-step generation may call sys.exit(); catch
      # SystemExit so one bad project does not stop the remaining builds.
      try:
        build_steps = build_and_run_coverage.get_build_steps(
            project.name, project.project_yaml_contents,
            project.dockerfile_contents.split('\n'), image_project,
            BASE_PROJECT)
      except SystemExit:
        continue
      request_build.run_build(project.name, image_project, build_steps,
                              credentials, '-coverage')
def update_badges():
  """Update badges."""
  fuzzing_statuses = load_status_from_gcs(FUZZING_STATUS_FILENAME)
  coverage_statuses = load_status_from_gcs(COVERAGE_STATUS_FILENAME)

  futures = []
  with concurrent.futures.ThreadPoolExecutor(max_workers=32) as executor:
    for project in Project.query():
      name = project.name
      if name not in fuzzing_statuses:
        continue
      # Certain projects (e.g. JVM and Python) do not have any coverage
      # builds, but should still receive a badge; use None in that case.
      coverage_status = coverage_statuses.get(name)
      futures.append(
          executor.submit(update_build_badges, name, fuzzing_statuses[name],
                          coverage_status))
    concurrent.futures.wait(futures)