# Imports the snippets below rely on.
import logging
from typing import Optional, Tuple, Type

import determined as det
from determined import tensorboard


def prepare_tensorboard(
    env: det.EnvContext,
    container_path: Optional[str] = None,
) -> Tuple[tensorboard.TensorboardManager, tensorboard.BatchMetricWriter]:
    tensorboard_mgr = tensorboard.build(
        env.det_cluster_id,
        env.det_experiment_id,
        env.det_trial_id,
        env.experiment_config["checkpoint_storage"],
        container_path,
    )
    # Prefer the TensorFlow writer; fall back to the PyTorch writer when
    # TensorFlow is not installed.
    try:
        from determined.tensorboard.metric_writers import tensorflow

        writer: tensorboard.MetricWriter = tensorflow.TFWriter()
    except ModuleNotFoundError:
        logging.warning("TensorFlow writer not found")
        from determined.tensorboard.metric_writers import pytorch

        writer = pytorch.TorchWriter()

    return (
        tensorboard_mgr,
        tensorboard.BatchMetricWriter(writer),
    )


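# Hypothetical illustration (not part of the snippets themselves): the
# "checkpoint_storage" value pulled from env.experiment_config and handed to
# tensorboard.build() is a section of the experiment config. A minimal sketch
# for a shared-filesystem setup might look like the dict below; the field
# names follow Determined's checkpoint_storage schema, the values are made up.
example_checkpoint_storage = {
    "type": "shared_fs",
    "host_path": "/tmp/determined-checkpoints",
}

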
def get_metric_writer() -> tensorboard.BatchMetricWriter:
    # Same fallback as above: use the TensorFlow writer if available,
    # otherwise the PyTorch writer.
    try:
        from determined.tensorboard.metric_writers import tensorflow

        writer: tensorboard.MetricWriter = tensorflow.TFWriter()
    except ModuleNotFoundError:
        logging.warning("TensorFlow writer not found")
        from determined.tensorboard.metric_writers import pytorch

        writer = pytorch.TorchWriter()

    return tensorboard.BatchMetricWriter(writer)


def prepare_tensorboard(
    env: det.EnvContext,
) -> Tuple[tensorboard.TensorboardManager, tensorboard.BatchMetricWriter]:
    tensorboard_mgr = tensorboard.build(env, env.experiment_config["checkpoint_storage"])
    # This variant prefers the PyTorch writer and falls back to TensorFlow.
    try:
        from determined.tensorboard.metric_writers import pytorch

        writer: tensorboard.MetricWriter = pytorch.TorchWriter()
    except ImportError:
        logging.warning("PyTorch writer not found")
        from determined.tensorboard.metric_writers import tensorflow

        writer = tensorflow.TFWriter()

    return (
        tensorboard_mgr,
        tensorboard.BatchMetricWriter(writer, env.experiment_config.batches_per_step()),
    )


def create_metric_writer(
    cls: Type["TFKerasTrialController"],
) -> tensorboard.BatchMetricWriter:
    # `tensorflow` here is the determined.tensorboard.metric_writers.tensorflow
    # module (as imported in the snippets above), not the TensorFlow package.
    writer = tensorflow.TFWriter()
    return tensorboard.BatchMetricWriter(writer)