コード例 #1
0
ファイル: tensorboard_test.py プロジェクト: yanndupis/flax
  def test_summarywriter_histogram_defaultbins(self):
    """Histogram summaries default to 30 bins when `bins` is not passed."""
    log_dir = tempfile.mkdtemp()
    summary_writer = SummaryWriter(log_dir=log_dir)
    histogram = onp.arange(1000)
    # Histogram will be created for 30 (default) bins.
    summary_writer.histogram(tag='histogram_test', values=histogram, step=1)

    summary_value = self.parse_and_return_summary_value(path=log_dir)
    self.assertEqual(summary_value.tag, 'histogram_test')
    actual_histogram = tensor_util.make_ndarray(summary_value.tensor)
    # Bug fix: assertTrue(x, y) treats `y` as a failure *message*, so the
    # original never compared the shape; assertEqual actually checks it.
    self.assertEqual(actual_histogram.shape, (30, 3))
    # Each row is (left_edge, right_edge, count); check the first bucket.
    self.assertTrue(
        onp.allclose(actual_histogram[0], (0.0, 33.3, 34.0), atol=1e-01))
コード例 #2
0
ファイル: tensorboard_test.py プロジェクト: yanndupis/flax
  def test_summarywriter_histogram_2bins(self):
    """Histogram summaries honor an explicit `bins` count."""
    log_dir = tempfile.mkdtemp()
    summary_writer = SummaryWriter(log_dir=log_dir)
    histogram = onp.arange(1000)
    summary_writer.histogram(
        tag='histogram_test', values=histogram, step=1, bins=2)

    summary_value = self.parse_and_return_summary_value(path=log_dir)
    self.assertEqual(summary_value.tag, 'histogram_test')
    actual_histogram = tensor_util.make_ndarray(summary_value.tensor)
    # Bug fix: assertTrue(x, y) treats `y` as a failure *message*, so the
    # original never compared the shape; assertEqual actually checks it.
    self.assertEqual(actual_histogram.shape, (2, 3))
    # Each row is (left_edge, right_edge, count); 1000 values split evenly
    # into two buckets of 500.
    self.assertTrue(
        onp.allclose(actual_histogram[0], (0.0, 499.5, 500.0), atol=1e-01))
    self.assertTrue(
        onp.allclose(actual_histogram[1], (499.5, 999.0, 500.0), atol=1e-01))
コード例 #3
0
def _log_histograms(writer: tensorboard.SummaryWriter, model: models.NerfModel,
                    state: model_utils.TrainState):
    """Log histograms to Tensorboard."""
    step = int(state.optimizer.state.step)
    params = state.optimizer.target['model']
    if 'appearance_encoder' in params:
        embeddings = params['appearance_encoder']['embed']['embedding']
        writer.histogram('appearance_embedding', embeddings, step)
    if 'camera_encoder' in params:
        embeddings = params['camera_encoder']['embed']['embedding']
        writer.histogram('camera_embedding', embeddings, step)
    if 'warp_field' in params and model.warp_metadata_encoder_type == 'glo':
        embeddings = params['warp_field']['metadata_encoder']['embed'][
            'embedding']
        writer.histogram('warp_embedding', embeddings, step)
コード例 #4
0
def _log_to_tensorboard(
    writer: tensorboard.SummaryWriter,
    state: model_utils.TrainState,
    scalar_params: training.ScalarParams,
    stats: Dict[str, Union[Dict[str, jnp.ndarray], jnp.ndarray]],
    time_dict: Dict[str, jnp.ndarray],
):
    """Log statistics to Tensorboard."""
    step = int(state.optimizer.state.step)
    writer.scalar("params/learning_rate", scalar_params.learning_rate, step)
    writer.scalar("params/warp_alpha", state.warp_alpha, step)
    writer.scalar("params/elastic_loss/weight",
                  scalar_params.elastic_loss_weight, step)

    # pmean is applied in train_step so just take the item.
    for branch in {"coarse", "fine"}:
        if branch not in stats:
            continue
        for stat_key, stat_value in stats[branch].items():
            writer.scalar(f"{stat_key}/{branch}", stat_value, step)

    if "background_loss" in stats:
        writer.scalar("losses/background", stats["background_loss"], step)

    params = state.optimizer.target["model"]
    if "appearance_encoder" in params:
        embeddings = params["appearance_encoder"]["embed"]["embedding"]
        writer.histogram("appearance_embedding", embeddings, step)
    if "camera_encoder" in params:
        embeddings = params["camera_encoder"]["embed"]["embedding"]
        writer.histogram("camera_embedding", embeddings, step)
    if "warp_field" in params:
        embeddings = params["warp_field"]["metadata_encoder"]["embed"][
            "embedding"]
        writer.histogram("warp_embedding", embeddings, step)

    for k, v in time_dict.items():
        writer.scalar(f"time/{k}", v, step)
コード例 #5
0
def _log_to_tensorboard(writer: tensorboard.SummaryWriter,
                        state: model_utils.TrainState,
                        scalar_params: training.ScalarParams,
                        stats: Dict[str, Union[Dict[str, jnp.ndarray],
                                               jnp.ndarray]],
                        time_dict: Dict[str, jnp.ndarray]):
    """Log statistics to Tensorboard."""
    step = int(state.optimizer.state.step)
    writer.scalar('params/learning_rate', scalar_params.learning_rate, step)
    writer.scalar('params/warp_alpha', state.warp_alpha, step)
    writer.scalar('params/elastic_loss/weight',
                  scalar_params.elastic_loss_weight, step)

    # pmean is applied in train_step so just take the item.
    for branch in {'coarse', 'fine'}:
        if branch not in stats:
            continue
        for stat_key, stat_value in stats[branch].items():
            writer.scalar(f'{stat_key}/{branch}', stat_value, step)

    if 'background_loss' in stats:
        writer.scalar('losses/background', stats['background_loss'], step)

    params = state.optimizer.target['model']
    if 'appearance_encoder' in params:
        embeddings = params['appearance_encoder']['embed']['embedding']
        writer.histogram('appearance_embedding', embeddings, step)
    if 'camera_encoder' in params:
        embeddings = params['camera_encoder']['embed']['embedding']
        writer.histogram('camera_embedding', embeddings, step)
    if 'warp_field' in params:
        embeddings = params['warp_field']['metadata_encoder']['embed'][
            'embedding']
        writer.histogram('warp_embedding', embeddings, step)

    for k, v in time_dict.items():
        writer.scalar(f'time/{k}', v, step)