def output_report(experiment_config: dict, in_progress=False):
    """Generate the HTML report and write it to |web_bucket|."""
    exp_name = experiment_utils.get_experiment_name()
    destination = posixpath.join(experiment_config['report_filestore'],
                                 exp_name)
    local_reports_dir = get_reports_dir()

    # Merging with nonprivate experiments while the experiment is still in
    # progress produces unusable realtime results, so only allow the merge
    # once the experiment has finished.
    if in_progress:
        allow_nonprivate_merge = False
    else:
        allow_nonprivate_merge = experiment_config.get(
            'merge_with_nonprivate', False)

    try:
        logger.debug('Generating report.')
        filesystem.recreate_directory(local_reports_dir)
        generate_report.generate_report(
            [exp_name],
            str(local_reports_dir),
            in_progress=in_progress,
            merge_with_clobber_nonprivate=allow_nonprivate_merge)
        # max-age=0 keeps the served report fresh on every request.
        filestore_utils.rsync(str(local_reports_dir),
                              destination,
                              gsutil_options=[
                                  '-h',
                                  'Cache-Control:public,max-age=0,no-transform'
                              ])
        logger.debug('Done generating report.')
    except data_utils.EmptyDataError:
        logs.warning('No snapshot data.')
    except Exception:  # pylint: disable=broad-except
        logger.error('Error generating HTML report.')
def copy_resources_to_bucket(config_dir: str, config: Dict):
    """Copy resources the dispatcher will need for the experiment to the
    experiment_filestore."""

    def filter_file(tar_info):
        """Filter out unnecessary directories."""
        # Returning None tells tarfile to skip the entry entirely.
        return None if FILTER_SOURCE_REGEX.match(tar_info.name) else tar_info

    # Set environment variables to use corresponding filestore_utils.
    os.environ['EXPERIMENT_FILESTORE'] = config['experiment_filestore']
    os.environ['EXPERIMENT'] = config['experiment']
    filestore_root = experiment_utils.get_experiment_filestore_path()
    input_destination = os.path.join(filestore_root, 'input')

    # Ship the local source repository to the cloud for use by the
    # dispatcher. Local changes to any file will propagate.
    source_archive = 'src.tar.gz'
    with tarfile.open(source_archive, 'w:gz') as tar:
        tar.add(utils.ROOT_DIR, arcname='', recursive=True, filter=filter_file)
    filestore_utils.cp(source_archive, input_destination + '/', parallel=True)
    os.remove(source_archive)

    # Send config files.
    config_destination = os.path.join(input_destination, 'config')
    filestore_utils.rsync(config_dir, config_destination, parallel=True)
def test_parallel_take_no_effects_locally(fs, use_local_filestore):  # pylint: disable=invalid-name,unused-argument
    """Tests that `parallel` argument takes no effect for local running no
    matter True or False."""
    fs.create_dir(LOCAL_DIR)
    fs.create_dir(LOCAL_DIR_2)

    def assert_parallel_irrelevant(operation):
        """Run |operation| with parallel=True then parallel=False and check
        that the underlying process invocation is identical."""
        with mock.patch('common.new_process.execute') as mocked_execute:
            operation(parallel=True)
            operation(parallel=False)
            calls = mocked_execute.call_args_list
            assert calls[0] == calls[1]

    assert_parallel_irrelevant(
        lambda parallel: filestore_utils.rsync(
            LOCAL_DIR, LOCAL_DIR_2, parallel=parallel))
    assert_parallel_irrelevant(
        lambda parallel: filestore_utils.cp(
            LOCAL_DIR, LOCAL_DIR_2, recursive=True, parallel=parallel))
    assert_parallel_irrelevant(
        lambda parallel: filestore_utils.rm(
            LOCAL_DIR, recursive=True, parallel=parallel))
def test_gsutil_parallel_on(fs, use_gsutil):  # pylint: disable=invalid-name,unused-argument
    """Tests that `parallel` is passed to gsutil execution."""
    with mock.patch('common.gsutil.gsutil_command') as mocked_gsutil_command:
        filestore_utils.rsync(GCS_DIR, GCS_DIR_2, parallel=True)
        # call_args is an (args, kwargs) pair; `parallel` must arrive as a
        # keyword argument set to True.
        _, kwargs = mocked_gsutil_command.call_args_list[0]
        assert kwargs.get('parallel') is True
def save_results(self):
    """Save the results directory to GCS."""
    if not self.gcs_sync_dir:
        return
    # Rsync a snapshot copy of the results directory rather than the live
    # one: the fuzzer's output log lives in this directory and can be
    # written at any time, and uploading a file that changes in size
    # mid-transfer raises an exception.
    snapshot = filesystem.make_dir_copy(self.results_dir)
    filestore_utils.rsync(
        snapshot, posixpath.join(self.gcs_sync_dir, self.results_dir))
def output_report(experiment_config: dict,
                  in_progress=False,
                  coverage_report=False):
    """Generate the HTML report and write it to |web_bucket|."""
    experiment_name = experiment_utils.get_experiment_name()
    local_reports_dir = get_reports_dir()

    core_fuzzers = set(get_core_fuzzers())
    fuzzers = set(experiment_config['fuzzers']) | core_fuzzers

    # Calculate path to store report files in filestore.
    web_filestore_path = experiment_config['report_filestore']
    if fuzzers - core_fuzzers:
        # Experimental report: at least one fuzzer is outside the core list,
        # so store the results under the |experimental| sub-directory.
        web_filestore_path = os.path.join(web_filestore_path, 'experimental')
    web_filestore_path = posixpath.join(web_filestore_path, experiment_name)

    # Merging with nonprivate experiments while the experiment is still in
    # progress produces unusable realtime results, so defer the merge until
    # the very end.
    merge_with_nonprivate = (False if in_progress else experiment_config.get(
        'merge_with_nonprivate', False))

    try:
        logger.debug('Generating report.')
        filesystem.recreate_directory(local_reports_dir)
        generate_report.generate_report(
            [experiment_name],
            str(local_reports_dir),
            report_name=experiment_name,
            fuzzers=fuzzers,
            in_progress=in_progress,
            merge_with_clobber_nonprivate=merge_with_nonprivate,
            coverage_report=coverage_report)
        filestore_utils.rsync(
            str(local_reports_dir),
            web_filestore_path,
            delete=False,  # Don't remove existing coverage jsons.
            gsutil_options=[
                '-h', 'Cache-Control:public,max-age=0,no-transform'
            ])
        logger.debug('Done generating report.')
    except data_utils.EmptyDataError:
        logs.warning('No snapshot data.')
    except Exception:  # pylint: disable=broad-except
        logger.error('Error generating HTML report.')
def test_using_gsutil(use_gsutil):  # pylint: disable=unused-argument
    """Tests that gsutil is used in Google Cloud running settings."""
    operations = [
        lambda: filestore_utils.cp(GCS_DIR, GCS_DIR_2, recursive=True),
        lambda: filestore_utils.ls(GCS_DIR),
        lambda: filestore_utils.rm(GCS_DIR, recursive=True),
        lambda: filestore_utils.rsync(GCS_DIR, GCS_DIR_2, recursive=True),
    ]
    for operation in operations:
        with mock.patch('common.new_process.execute') as mocked_execute:
            operation()
            # The first positional argument of the first call is the
            # command list; it must invoke gsutil.
            executed_command = mocked_execute.call_args_list[0][0][0]
            assert 'gsutil' in executed_command
def test_using_local_filestore(fs, use_local_filestore):  # pylint: disable=invalid-name,unused-argument
    """Tests that local_filestore is used in local running settings."""
    fs.create_dir(LOCAL_DIR)
    fs.create_dir(LOCAL_DIR_2)

    operations = [
        lambda: filestore_utils.cp(LOCAL_DIR, LOCAL_DIR_2, recursive=True),
        lambda: filestore_utils.ls(LOCAL_DIR),
        lambda: filestore_utils.rm(LOCAL_DIR, recursive=True),
        lambda: filestore_utils.rsync(LOCAL_DIR, LOCAL_DIR_2, recursive=True),
    ]
    for operation in operations:
        with mock.patch('common.new_process.execute') as mocked_execute:
            operation()
            # Local runs must never shell out to gsutil.
            executed_command = mocked_execute.call_args_list[0][0][0]
            assert 'gsutil' not in executed_command
def output_report(web_bucket, in_progress=False):
    """Generate the HTML report and write it to |web_bucket|."""
    exp_name = experiment_utils.get_experiment_name()
    local_reports_dir = get_reports_dir()
    try:
        logger.debug('Generating report.')
        filesystem.recreate_directory(local_reports_dir)
        generate_report.generate_report([exp_name],
                                        str(local_reports_dir),
                                        in_progress=in_progress)
        # max-age=0 keeps the served report fresh on every request.
        filestore_utils.rsync(
            str(local_reports_dir),
            web_bucket,
            gsutil_options=[
                '-h', 'Cache-Control:public,max-age=0,no-transform'
            ])
        logger.debug('Done generating report.')
    except data_utils.EmptyDataError:
        logs.warning('No snapshot data.')
    except Exception:  # pylint: disable=broad-except
        logger.error('Error generating HTML report.')