def test_illegal_type() -> None:
    """A non-string ``"type"`` in the checkpoint config must raise a TypeError."""
    checkpoint_config = {"type": 4}
    # Build the env *before* entering the raises block so that a failure in
    # the fixture cannot masquerade as the expected TypeError — only the
    # call under test should be able to satisfy pytest.raises.
    env = test_util.get_dummy_env()
    with pytest.raises(TypeError, match="must be a string"):
        tensorboard.build(
            env.det_cluster_id, env.det_experiment_id, env.det_trial_id, checkpoint_config
        )
def test_getting_manager_instance(tmp_path: pathlib.Path) -> None:
    """Building with a shared_fs config yields a SharedFSTensorboardManager."""
    env = test_util.get_dummy_env()
    config = {"type": "shared_fs", "host_path": HOST_PATH}
    built = tensorboard.build(
        env.det_cluster_id, env.det_experiment_id, env.det_trial_id, config
    )
    assert isinstance(built, tensorboard.SharedFSTensorboardManager)
def test_s3_build(prefix: Optional[str]) -> None:
    """An s3 config (with any prefix value) builds an S3TensorboardManager."""
    conf = copy.deepcopy(default_conf)
    conf["prefix"] = prefix
    env = test_util.get_dummy_env()
    built = tensorboard.build(
        env.det_cluster_id, env.det_experiment_id, env.det_trial_id, conf
    )
    assert isinstance(built, tensorboard.S3TensorboardManager)
def test_unknown_type() -> None:
    """An unrecognized storage type must be rejected with a TypeError."""
    config = {
        "type": "unknown",
        "host_path": HOST_PATH,
    }
    with pytest.raises(TypeError, match="Unknown storage type: unknown"):
        tensorboard.build(test_util.get_dummy_env(), config)
def test_list_nonexistent_directory(tmp_path: pathlib.Path) -> None:
    """Listing tfevents under a missing base path yields an empty list."""
    missing = pathlib.Path("/non-existent-directory")
    sync_path = tensorboard.get_sync_path(test_util.get_dummy_env())
    tb_manager = tensorboard.SharedFSTensorboardManager(str(tmp_path), missing, sync_path)
    # Sanity check: the directory really does not exist on this machine.
    assert not missing.exists()
    assert tb_manager.list_tfevents() == []
def test_invalid_prefix(monkeypatch: monkeypatch.MonkeyPatch) -> None:
    """A prefix containing ``..`` path traversal must raise a ValueError."""
    conf = copy.deepcopy(default_conf)
    conf["prefix"] = "my/invalid/../prefix"
    env = test_util.get_dummy_env()
    with pytest.raises(ValueError):
        tensorboard.build(
            env.det_cluster_id, env.det_experiment_id, env.det_trial_id, conf
        )
def test_s3_build_missing_param() -> None:
    """Omitting the required ``"bucket"`` key must raise a KeyError."""
    conf = copy.deepcopy(default_conf)
    del conf["bucket"]
    # Create the env *before* entering the raises block so that a failure in
    # the fixture cannot masquerade as the expected KeyError — only the call
    # under test should be able to satisfy pytest.raises.
    env = test_util.get_dummy_env()
    with pytest.raises(KeyError):
        tensorboard.build(env.det_cluster_id, env.det_experiment_id, env.det_trial_id, conf)
def test_getting_manager_instance(tmp_path: pathlib.Path) -> None:
    """Building with a shared_fs config yields a SharedFSTensorboardManager."""
    checkpoint_config = {
        "type": "shared_fs",
        "host_path": HOST_PATH,
        "container_path": tmp_path,
    }
    manager = tensorboard.build(test_util.get_dummy_env(), checkpoint_config)
    # Reference the class through the tensorboard module, matching the other
    # tests in this file (a bare name depends on a separate direct import).
    assert isinstance(manager, tensorboard.SharedFSTensorboardManager)
def test_list_directory(tmp_path: pathlib.Path) -> None:
    """The example tfevents file under the base path is discovered."""
    base_path = tensorboard.get_base_path({"base_path": BASE_PATH}, manager=True)
    sync_path = tensorboard.get_sync_path(test_util.get_dummy_env())
    tb_manager = tensorboard.SharedFSTensorboardManager(str(tmp_path), base_path, sync_path)
    expected_event = BASE_PATH.joinpath("tensorboard", "events.out.tfevents.example")
    assert set(tb_manager.list_tfevents()) == {expected_event}
def test_s3_faulty_lifecycle(monkeypatch: monkeypatch.MonkeyPatch) -> None:
    """A faulty S3 client surfaces S3UploadFailedError from sync()."""
    monkeypatch.setattr("boto3.client", s3.s3_faulty_client)
    env = test_util.get_dummy_env()
    tb_manager = tensorboard.build(
        env.det_cluster_id, env.det_experiment_id, env.det_trial_id, default_conf
    )
    with pytest.raises(exceptions.S3UploadFailedError):
        tb_manager.sync()
def test_unknown_type() -> None:
    """An unrecognized storage type must be rejected with a TypeError."""
    checkpoint_config = {
        "type": "unknown",
        "host_path": HOST_PATH,
    }
    # Build the env *before* entering the raises block so that a failure in
    # the fixture cannot masquerade as the expected TypeError — only the
    # call under test should be able to satisfy pytest.raises.
    env = test_util.get_dummy_env()
    with pytest.raises(TypeError, match="Unknown storage type: unknown"):
        tensorboard.build(
            env.det_cluster_id, env.det_experiment_id, env.det_trial_id, checkpoint_config
        )
def test_setting_storage_path(tmp_path: pathlib.Path) -> None:
    """An explicit storage_path in the config is honored by the manager."""
    config = {
        "type": "shared_fs",
        "host_path": str(HOST_PATH),
        "storage_path": str(STORAGE_PATH),
    }
    built = tensorboard.build(test_util.get_dummy_env(), config)
    assert isinstance(built, tensorboard.SharedFSTensorboardManager)
    assert built.storage_path == STORAGE_PATH
def test_setting_optional_variable(tmp_path: pathlib.Path) -> None:
    """An explicit base_path is propagated into the manager's tensorboard dir."""
    config = {
        "type": "shared_fs",
        "base_path": "test_value",
        "host_path": HOST_PATH,
    }
    built = tensorboard.build(test_util.get_dummy_env(), config)
    assert isinstance(built, tensorboard.SharedFSTensorboardManager)
    assert built.base_path == pathlib.Path("test_value/tensorboard")
def test_s3_lifecycle(monkeypatch: monkeypatch.MonkeyPatch) -> None:
    """sync() uploads the example tfevents file to the mock S3 bucket."""
    monkeypatch.setattr("boto3.client", s3.s3_client)
    tb_manager = tensorboard.build(test_util.get_dummy_env(), default_conf)
    assert isinstance(tb_manager, tensorboard.S3TensorboardManager)
    tb_manager.sync()
    key = "uuid-123/tensorboard/experiment/1/trial/1/events.out.tfevents.example"
    assert ("s3_bucket", key) in tb_manager.client.objects
def test_list_nonexistent_directory(tmp_path: pathlib.Path) -> None:
    """Listing tfevents under a missing base path yields an empty list."""
    env = test_util.get_dummy_env()
    missing = pathlib.Path("/non-existent-directory")
    sync_path = tensorboard.get_sync_path(
        env.det_cluster_id, env.det_experiment_id, env.det_trial_id
    )
    tb_manager = tensorboard.SharedFSTensorboardManager(str(tmp_path), missing, sync_path)
    # Sanity check: the directory really does not exist on this machine.
    assert not missing.exists()
    assert tb_manager.list_tfevents() == []
def test_list_nonexistent_directory(tmp_path: pathlib.Path) -> None:
    """Listing tfevents under a missing base path yields an empty list."""
    missing = "/non-existent-directory"
    config = {
        "type": "shared_fs",
        "base_path": missing,
        "host_path": HOST_PATH,
        "container_path": tmp_path,
    }
    built = tensorboard.build(test_util.get_dummy_env(), config)
    # Sanity check: the directory really does not exist on this machine.
    assert not pathlib.Path(missing).exists()
    assert built.list_tfevents() == []
def test_list_directory(tmp_path: pathlib.Path) -> None:
    """The example tfevents file under the base path is discovered."""
    config = {
        "type": "shared_fs",
        "base_path": BASE_PATH,
        "host_path": HOST_PATH,
        "container_path": tmp_path,
    }
    built = tensorboard.build(test_util.get_dummy_env(), config)
    expected_event = BASE_PATH.joinpath("tensorboard", "events.out.tfevents.example")
    assert set(built.list_tfevents()) == {expected_event}
def test_build_with_container_path(tmp_path: pathlib.Path) -> None:
    """container_path remaps the configured storage path into the container."""
    config = {
        "type": "shared_fs",
        "host_path": str(HOST_PATH),
        "storage_path": str(STORAGE_PATH),
    }
    env = test_util.get_dummy_env()
    built = tensorboard.build(
        env.det_cluster_id,
        env.det_experiment_id,
        env.det_trial_id,
        config,
        container_path=str(tmp_path),
    )
    assert isinstance(built, tensorboard.SharedFSTensorboardManager)
    assert built.storage_path == tmp_path.joinpath("test_storage_path")
def test_s3_lifecycle(monkeypatch: monkeypatch.MonkeyPatch, prefix: Optional[str]) -> None:
    """sync() uploads tfevents under the configured prefix (when one is set)."""
    monkeypatch.setattr("boto3.client", s3.s3_client)
    env = test_util.get_dummy_env()
    conf = copy.deepcopy(default_conf)
    conf["prefix"] = prefix
    tb_manager = tensorboard.build(
        env.det_cluster_id, env.det_experiment_id, env.det_trial_id, conf
    )
    assert isinstance(tb_manager, tensorboard.S3TensorboardManager)
    key = "uuid-123/tensorboard/experiment/1/trial/1/events.out.tfevents.example"
    tb_manager.sync()
    if prefix is not None:
        # A normalized, leading-slash-stripped prefix is prepended to the key.
        key = os.path.join(os.path.normpath(prefix).lstrip("/"), key)
    assert ("s3_bucket", key) in tb_manager.client.objects
def test_s3_build() -> None:
    """The default s3 config builds an S3TensorboardManager."""
    built = tensorboard.build(test_util.get_dummy_env(), default_conf)
    assert isinstance(built, tensorboard.S3TensorboardManager)
def test_illegal_type() -> None:
    """A non-string ``"type"`` in the checkpoint config raises a TypeError."""
    config = {"type": 4}
    with pytest.raises(TypeError, match="must be a string"):
        tensorboard.build(test_util.get_dummy_env(), config)
def test_missing_type() -> None:
    """A config without a ``"type"`` key raises a TypeError."""
    with pytest.raises(TypeError, match="Missing 'type' parameter"):
        tensorboard.build(test_util.get_dummy_env(), {})
def test_missing_type() -> None:
    """A config without a ``"type"`` key must raise a TypeError."""
    # Create the env *before* entering the raises block so that a failure in
    # the fixture cannot masquerade as the expected TypeError — only the
    # call under test should be able to satisfy pytest.raises.
    env = test_util.get_dummy_env()
    with pytest.raises(TypeError, match="Missing 'type' parameter"):
        tensorboard.build(env.det_cluster_id, env.det_experiment_id, env.det_trial_id, {})
def test_s3_build_missing_param() -> None:
    """Omitting the required ``"bucket"`` key raises a KeyError."""
    conf = copy.deepcopy(default_conf)
    del conf["bucket"]
    with pytest.raises(KeyError):
        tensorboard.build(test_util.get_dummy_env(), conf)