def _setup(self):
  """Activate gcloud, download benchmark data and check out git repos.

  Wall-clock timings of each step are recorded in
  ``self.benchmark_execution_time`` under the keys
  'activate_gcloud_service', 'download_data' and 'checkout_repository'.

  Returns:
    Git metadata for the checked-out site packages, as reported by
    utils.checkout_git_repos().
  """
  # Activate the gcloud service account and prepare the output dir.
  t0 = time.time()
  utils.setup_python_path(self.site_packages_dir,
                          self.config.python_path_str)
  utils.active_gcloud_service(self.config.gcloud_key_file_url,
                              self.workspace_dir)
  utils.make_dir_if_not_exist(self.root_output_dir)
  self.benchmark_execution_time['activate_gcloud_service'] = (
      time.time() - t0)

  # Fetch data sets: first the GCS downloads, then the generic ones.
  t0 = time.time()
  for downloads_str in (self.config.gcs_downloads_str,
                        self.config.data_downloads_str):
    utils.download_data(
        utils.parse_data_downloads_str(self.config.root_data_dir,
                                       downloads_str))
  self.benchmark_execution_time['download_data'] = time.time() - t0

  # Check out the benchmark git repositories.
  t0 = time.time()
  site_package_info = utils.checkout_git_repos(
      self.config.get_git_repos(self.site_packages_dir),
      self.config.force_update)
  self.benchmark_execution_time['checkout_repository'] = time.time() - t0

  # Mirror log records to stdout so they show up in the console output.
  self.stream_handler = logging.StreamHandler(sys.stdout)
  self.stream_handler.setFormatter(
      logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
  logging.getLogger().addHandler(self.stream_handler)
  return site_package_info
def _setup(self):
  """Set up the Python path, gcloud auth, output dir and stdout logging.

  Side effects: extends sys.path with the configured packages, activates
  the gcloud service account from ``self.auth_token_path``, creates
  ``self.output_root_dir`` if missing, and attaches a stream handler so
  log records are mirrored to stdout.
  """
  # Fixed: the original read the bare name `config`, which is not
  # defined in this method; every sibling variant reads it off `self`.
  utils.setup_python_path(self.site_packages_dir,
                          self.config.python_path_str)
  utils.active_gcloud_service(self.auth_token_path)
  utils.make_dir_if_not_exist(self.output_root_dir)
  # NOTE(review): attribute kept as `streamHandler` (not snake_case)
  # because other code may reference it by this name — confirm before
  # renaming.
  self.streamHandler = logging.StreamHandler(sys.stdout)
  self.streamHandler.setFormatter(
      logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
  logging.getLogger().addHandler(self.streamHandler)
def _setup(self): """Download data and checkout git repository.""" # Acticate gcloud service start_time = time.time() utils.setup_python_path(self.site_packages_dir, self.config.python_path_str) utils.active_gcloud_service(self.config.gcloud_key_file_url, self.workspace_dir) utils.make_dir_if_not_exist(self.root_output_dir) self.benchmark_execution_time['activate_gcloud_service'] = ( time.time() - start_time) # Download data start_time = time.time() utils.download_data( utils.parse_data_downloads_str(self.config.root_data_dir, self.config.gcs_downloads_str)) utils.download_data( utils.parse_data_downloads_str(self.config.root_data_dir, self.config.data_downloads_str)) self.benchmark_execution_time['download_data'] = time.time( ) - start_time # Checkout git repositories start_time = time.time() site_package_info = utils.checkout_git_repos( self.config.get_git_repos(self.site_packages_dir), self.config.use_cached_site_packages) self.benchmark_execution_time['checkout_repository'] = (time.time() - start_time) # Start cloud TPU. if self.config.tpu_parameters is not None: start_time = time.time() utils.setup_tpu(self.config.tpu_parameters) tpu_info = tpu_runtime_utils.configure_tpu( self.config.tpu_parameters) site_package_info['tpu_version'] = tpu_info self.benchmark_execution_time['start_tpu'] = time.time( ) - start_time self.stream_handler = logging.StreamHandler(sys.stdout) self.stream_handler.setFormatter( logging.Formatter('%(asctime)s %(levelname)s: %(message)s')) logging.getLogger().addHandler(self.stream_handler) return site_package_info
def _setup(self):
  """Prepare drives, credentials, data and git repos for a benchmark run.

  Wall-clock timings are recorded in ``self.benchmark_execution_time``
  under the keys 'create_drive', 'download_token',
  'activate_gcloud_service', 'download_data' and 'checkout_repository'.

  Returns:
    Git metadata for the checked-out repositories from
    utils.checkout_git_repos().
  """
  # Assemble the local devices into a raid array mounted at /data.
  t0 = time.time()
  device_utils.create_drive_from_devices('/data',
                                         self.config.gce_nvme_raid_str)
  self.benchmark_execution_time['create_drive'] = time.time() - t0

  # Fetch the gcloud auth tokens from GCS into the workspace.
  t0 = time.time()
  utils.download_from_gcs([{
      'gcs_url': 'gs://tf-performance/auth_tokens',
      'local_path': os.path.join(self.workspace_dir, 'auth_tokens')
  }])
  self.benchmark_execution_time['download_token'] = time.time() - t0

  # Activate the gcloud service account and prepare the output dir.
  t0 = time.time()
  utils.setup_python_path(self.site_packages_dir,
                          self.config.python_path_str)
  utils.active_gcloud_service(self.auth_token_path)
  utils.make_dir_if_not_exist(self.output_root_dir)
  self.benchmark_execution_time['activate_gcloud_service'] = (
      time.time() - t0)

  # Download benchmark data onto the raid array.
  t0 = time.time()
  utils.download_from_gcs(self.config.get_gcs_downloads('/data'))
  self.benchmark_execution_time['download_data'] = time.time() - t0

  # Check out the benchmark git repositories.
  t0 = time.time()
  site_package_info = utils.checkout_git_repos(
      self.config.get_git_repos(self.site_packages_dir),
      self.config.force_update)
  self.benchmark_execution_time['checkout_repository'] = time.time() - t0

  # Mirror log records to stdout so they show up in the console output.
  self.stream_handler = logging.StreamHandler(sys.stdout)
  self.stream_handler.setFormatter(
      logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
  logging.getLogger().addHandler(self.stream_handler)
  return site_package_info
# Configure root logging before any other work so all setup messages
# are captured at DEBUG level.
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
                    level=logging.DEBUG)
# Unrecognized command-line flags are fatal in this variant.
if unparsed:
  logging.error('Arguments %s are not recognized', unparsed)
  sys.exit(1)

# Wall-clock timings of each setup step, keyed by step name.
setup_execution_time = {}
# project_dir is the repository root (two levels above this file).
project_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
workspace_dir = os.path.join(project_dir, FLAGS.workspace)

# Download gcloud auth token. Remove this operation in the future when
# docker in Kokoro can access the GCP metadata server.
start_time = time.time()
utils.active_gcloud_service(FLAGS.gcloud_key_file_url, workspace_dir,
                            download_only=True)
setup_execution_time['download_token'] = time.time() - start_time

# Set up the raid array.
start_time = time.time()
device_utils.create_drive_from_devices(FLAGS.root_data_dir,
                                       FLAGS.gce_nvme_raid)
setup_execution_time['create_drive'] = time.time() - start_time

# Create docker image.
start_time = time.time()
dockerfile_path = FLAGS.dockerfile_path
if not os.path.exists(dockerfile_path):
  # Fall back to the deprecated approach if the user-specified
  # dockerfile_path does not exist
# Register the setup flags, then parse; unknown flags are tolerated
# (logged below) rather than fatal in this variant.
perfzero_config.add_setup_parser_arguments(parser)
FLAGS, unparsed = parser.parse_known_args()

# Configure root logging before any other work so all setup messages
# are captured at DEBUG level.
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
                    level=logging.DEBUG)
if unparsed:
  logging.warning('Arguments %s are not recognized', unparsed)

# Wall-clock timings of each setup step, keyed by step name.
setup_execution_time = {}
# project_dir is the repository root (two levels above this file).
project_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
workspace_dir = os.path.join(project_dir, FLAGS.workspace)

# Download gcloud auth token. Remove this operation in the future when
# docker in Kokoro can access the GCP metadata server.
start_time = time.time()
utils.active_gcloud_service(FLAGS.gcloud_key_file_url, workspace_dir,
                            download_only=True)
setup_execution_time['download_token'] = time.time() - start_time

# Set up the raid array.
start_time = time.time()
device_utils.create_drive_from_devices(FLAGS.root_data_dir,
                                       FLAGS.gce_nvme_raid)
setup_execution_time['create_drive'] = time.time() - start_time

# Build the docker image from the Dockerfile bundled with the project;
# the Dockerfile content is streamed on stdin ("- < file"), so the build
# has no local context directory.
start_time = time.time()
dockerfile_path = os.path.join(project_dir, FLAGS.dockerfile_path)
docker_tag = 'temp/tf-gpu'
cmd = 'docker build --pull -t {} - < {}'.format(docker_tag,
                                                dockerfile_path)
utils.run_commands([cmd])
logging.info('Built docker image with tag %s', docker_tag)
setup_execution_time['build_docker'] = time.time() - start_time