def test_get_git_repos(self):
  """get_git_repos() parses the comma-separated repo spec into dicts.

  Each entry is `url;branch[;git_hash]`; the parsed dict also carries the
  repo directory name and its path under the given site-package dir.
  """
  config = perfzero_config.PerfZeroConfig(mode='mock')
  config.git_repos_str = ('https://github.com/tensorflow/benchmarks.git'
                          ';branch_1;hash_1,'
                          'https://github.com/tensorflow/models.git'
                          ';branch_2')

  git_repos = config.get_git_repos('/site_package_dir')

  expected_first = {
      'url': 'https://github.com/tensorflow/benchmarks.git',
      'dir_name': 'benchmarks',
      'local_path': '/site_package_dir/benchmarks',
      'branch': 'branch_1',
      'git_hash': 'hash_1',
  }
  # Second spec omits the hash, so no 'git_hash' key is expected.
  expected_second = {
      'url': 'https://github.com/tensorflow/models.git',
      'dir_name': 'models',
      'local_path': '/site_package_dir/models',
      'branch': 'branch_2',
  }

  self.assertEqual(2, len(git_repos))
  self.assertEqual(expected_first, git_repos[0])
  self.assertEqual(expected_second, git_repos[1])
# NOTE(review): this chunk begins mid-method — the enclosing `def` is not
# visible here. The fragment below appears to be the tail of the benchmark
# run loop: best-effort TPU cleanup, summary printing, and exit status.
    if self.config.tpu_parameters is not None:
      # Fold any cleanup failure into the overall exception flag so the
      # process still exits non-zero — TODO confirm cleanup_tpu returns bool.
      has_exception |= utils.cleanup_tpu(self.config.tpu_parameters)
    print('Benchmark execution time in seconds by operation:\n {}'.format(
        json.dumps(self.benchmark_execution_time, indent=2)))
    print('Benchmark success results:\n{}'.format(
        json.dumps(benchmark_success_results, indent=2)))
    print('Benchmark local output directories:\n{}'.format(
        json.dumps(benchmark_output_dirs, indent=2)))
    if has_exception:
      # Non-zero exit so CI/schedulers see the run as failed.
      sys.exit(1)


if __name__ == '__main__':
  # Script entry point: parse flags, configure logging, and run benchmarks.
  parser = argparse.ArgumentParser(
      formatter_class=argparse.ArgumentDefaultsHelpFormatter)
  perfzero_config.add_benchmark_parser_arguments(parser)
  # parse_known_args() tolerates unknown flags here only to report them
  # explicitly below, rather than crashing with argparse's default error.
  FLAGS, unparsed = parser.parse_known_args()

  level = logging.DEBUG if FLAGS.debug else logging.INFO
  logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
                      level=level)

  if unparsed:
    # Unrecognized arguments are treated as a hard error.
    logging.error('Arguments %s are not recognized', unparsed)
    sys.exit(1)

  # Build the config from parsed command-line flags and run.
  config_ = perfzero_config.PerfZeroConfig(mode='flags', flags=FLAGS)
  benchmark_runner = BenchmarkRunner(config_)
  benchmark_runner.run_benchmark()
# NOTE(review): this chunk begins mid-method — the upload call below belongs
# to a method whose `def` line is outside this view.
    # Push the collected execution summary to the configured BigQuery
    # project/table.
    report_utils.upload_execution_summary(self.config.bigquery_project_name_str,
                                          self.config.bigquery_table_name_str,
                                          execution_summary)

  def _instantiate_benchmark_class(self, output_dir):
    """Import and instantiate the benchmark class named in the config.

    `benchmark_class_str` is a dotted path such as 'pkg.module.ClassName';
    it is split on the last dot into module path and class name, the module
    is imported, and the class is constructed with `output_dir` as its only
    keyword argument.
    """
    module_import_path, class_name = self.config.benchmark_class_str.rsplit(
        '.', 1)
    module = importlib.import_module(module_import_path)
    class_ = getattr(module, class_name)
    instance = class_(output_dir=output_dir)
    return instance


if __name__ == '__main__':
  # Script entry point (env-var variant): the config is read from the
  # process environment rather than from command-line flags.
  parser = argparse.ArgumentParser()
  parser.add_argument('--debug', action='store_true')
  # NOTE(review): unlike the flag-based entry point, leftover `unparsed`
  # arguments are silently ignored here.
  FLAGS, unparsed = parser.parse_known_args()

  level = logging.INFO
  if FLAGS.debug:
    level = logging.DEBUG
  logging.basicConfig(
      format='%(asctime)s %(levelname)s: %(message)s', level=level)

  config = perfzero_config.PerfZeroConfig(mode='env')
  benchmark_runner = BenchmarkRunner(config)
  benchmark_runner.run_benchmark()
def setUp(self):
  """Build a fresh mock PerfZeroConfig with a docker file path per test."""
  mock_config = perfzero_config.PerfZeroConfig(mode='mock')
  mock_config.dockerfile_path_str = '/docker/DockerFileFoo'
  self.config = mock_config
  super(TestSetupRunner, self).setUp()
def test_get_env_vars(self):
  """get_env_vars() picks up PERFZERO_-prefixed environment variables.

  Fix: the original version set PERFZERO_VAR1 in os.environ and never
  removed it, leaking the variable into every other test running in the
  same process. The mutation is now undone in a finally block.
  """
  config = perfzero_config.PerfZeroConfig(mode='mock')
  # Assumes no PERFZERO_* variables are already set in the test
  # environment — TODO confirm this holds in CI.
  self.assertEqual({}, config.get_env_vars())

  os.environ['PERFZERO_VAR1'] = 'value1'
  try:
    self.assertEqual({'PERFZERO_VAR1': 'value1'}, config.get_env_vars())
  finally:
    # Always restore the environment so the mutation cannot affect
    # other tests, even if the assertion above fails.
    os.environ.pop('PERFZERO_VAR1', None)