def test_ancient_checkpoints(self, ckpt_ver):
    checkpoint_dir = Path(utils.fixtures_path("ancient-checkpoints"))
    latest_checkpoint = f"{ckpt_ver}-keras"

    def make_workloads() -> workload.Stream:
        trainer = utils.TrainAndValidate()
        yield from trainer.send(steps=1, validation_freq=1, scheduling_unit=1)

    hparams = {"learning_rate": 0.001, "global_batch_size": 3, "dataset_range": 10}

    controller = utils.make_trial_controller_from_trial_implementation(
        ancient_keras_ckpt.AncientTrial,
        hparams,
        make_workloads(),
        trial_seed=self.trial_seed,
        checkpoint_dir=str(checkpoint_dir),
        latest_checkpoint=latest_checkpoint,
        steps_completed=1,
    )
    controller.run()
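# Hedged sketch: the ckpt_ver argument above implies a pytest parametrization
# over checkpoint versions, along these lines. The version strings below are
# placeholders, not the actual fixture directory names.
import pytest

@pytest.mark.parametrize("ckpt_ver", ["0.13.13", "0.17.6"])  # placeholder versions
def test_ancient_checkpoints_example(ckpt_ver: str) -> None:
    ...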
def test_checkpoint_loading(ckpt: str, istrial: bool):
    checkpoint_dir = os.path.join(utils.fixtures_path("ancient-checkpoints"), f"{ckpt}")
    trial = pytorch.load_trial_from_checkpoint_path(checkpoint_dir)
    if istrial:
        assert isinstance(trial, pytorch.PyTorchTrial), type(trial)
    else:
        assert isinstance(trial, torch.nn.Module), type(trial)
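# Hedged sketch of the parametrization presumably driving test_checkpoint_loading:
# (ckpt, istrial) pairs, where istrial marks checkpoints saved from a full
# PyTorchTrial versus a bare nn.Module. The directory names are placeholders.
import pytest

@pytest.mark.parametrize(
    "ckpt,istrial",
    [
        ("0.13.13-pytorch-old", False),  # placeholder: pre-trial-format checkpoint
        ("0.17.6-pytorch", True),        # placeholder: trial-format checkpoint
    ],
)
def test_checkpoint_loading_example(ckpt: str, istrial: bool) -> None:
    ...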
def test_native_api_local_test() -> None:
    subprocess.check_call(
        args=[sys.executable, "pytorch_onevar_model.py"],
        cwd=utils.fixtures_path(""),
        env={
            "PYTHONUNBUFFERED": "1",
            "PYTHONPATH": f"$PYTHONPATH:{utils.repo_path('harness')}",
            **os.environ,
        },
    )
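# Two details of the call above are worth noting. First, env is a dict literal
# and later entries win, so the trailing **os.environ lets variables already
# set in the caller's environment override the two defaults listed first.
# Second, "$PYTHONPATH" is passed literally: check_call does not invoke a
# shell here, so no variable expansion happens. A minimal illustration of the
# merge order:
defaults = {"PYTHONUNBUFFERED": "1"}
merged = {**defaults, "PYTHONUNBUFFERED": "0"}
assert merged["PYTHONUNBUFFERED"] == "0"  # the later entry takes precedence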
def test_surface_native_error():
    cmd = ["python3", utils.fixtures_path("tf_keras_runtime_error.py")]
    with subprocess.Popen(cmd, stderr=subprocess.PIPE) as p:
        err = p.stderr.read()
        assert p.wait() != 0
        if tf.executing_eagerly():
            assert (
                b"ValueError: Shapes (None, 10) and (None, 1) are incompatible" in err
                or b"ValueError: Input 0 of layer sequential is incompatible with the "
                b"layer: : expected min_ndim=2, found ndim=1. Full shape received: [1]" in err
                or b"ValueError: Input 0 of layer sequential is incompatible with the "
                b"layer: : expected min_ndim=2, found ndim=1. Full shape received: (1,)" in err
            )
        else:
            assert b"ValueError: Input 0 of layer sequential is incompatible with the layer" in err
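# Hedged sketch of the kind of bug a fixture like tf_keras_runtime_error.py
# presumably contains: a label/output width mismatch, which eager TF 2.x
# reports as the "Shapes ... are incompatible" ValueError asserted above. The
# layer and label sizes here are illustrative only.
import numpy as np
import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
model.compile(optimizer="sgd", loss="categorical_crossentropy")
x = np.zeros((8, 4), dtype="float32")
y = np.zeros((8, 10), dtype="float32")  # width-10 labels vs width-1 model output
model.fit(x, y)  # raises a ValueError about incompatible shapes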
def test_checkpoint_loading(ckpt_ver):
    checkpoint_dir = os.path.join(utils.fixtures_path("ancient-checkpoints"), f"{ckpt_ver}-keras")
    model = keras.load_model_from_checkpoint_path(checkpoint_dir)
    assert isinstance(model, tf.keras.models.Model), type(model)
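# Hedged follow-up: the loader used above returns an ordinary tf.keras Model,
# so standard Keras APIs apply to it afterward. The path is a placeholder.
from determined import keras  # same loader module as in the test above

loaded = keras.load_model_from_checkpoint_path("/path/to/some-checkpoint")
loaded.summary()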
def test_local_mode() -> None:
    utils.run_local_test_mode(utils.fixtures_path("tf_keras_xor_model_native.py"))
    cause eager execution to be turned off.
    """
    is_tf2 = version.parse(tf.__version__) >= version.parse("2.0.0")  # type: bool
    if is_tf2:
        assert tf.executing_eagerly()


@pytest.fixture(
    scope="function",
    params=[
        tf_keras_xor_model.XORTrial,
        tf_keras_xor_model.XORTrialWithTrainingMetrics,
        tf_keras_xor_model.XORTrialWithCustomObjects,
        tf_keras_xor_model.XORTrialWithDataLayer,
        [utils.fixtures_path("tf_keras_xor_model_native.py")],
        [utils.fixtures_path("tf_keras_xor_model_native.py"), "--use-dataset"],
    ],
)
def xor_trial_controller(request):
    """
    This fixture will provide a function that takes a hyperparameters dictionary as input and
    returns a trial controller. It is parameterized over different implementations (both native
    and trial), so that any test that uses it may test a full set of implementations.
    """
    if isinstance(request.param, list):

        def _xor_trial_controller(
            hparams: Dict[str, Any],
            workloads: workload.Stream,
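# Hedged example of a test consuming the xor_trial_controller fixture above;
# the hparams keys are assumptions modeled on the XOR fixtures, and
# utils.TrainAndValidate is the same workload helper used earlier in this
# section.
def test_xor_training_example(xor_trial_controller) -> None:
    def make_workloads() -> workload.Stream:
        trainer = utils.TrainAndValidate()
        yield from trainer.send(steps=10, validation_freq=10, scheduling_unit=1)

    hparams = {"hidden_size": 2, "learning_rate": 0.1, "global_batch_size": 4}
    controller = xor_trial_controller(hparams, make_workloads())
    controller.run()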
def test_local_mode() -> None:
    utils.run_local_test_mode(utils.fixtures_path("estimator_xor_model_native.py"))
import pytest
import tensorflow as tf

import determined as det
from determined import workload
from determined.exec import harness
from tests.experiment import utils  # noqa: I100
from tests.experiment.fixtures import estimator_xor_model


@pytest.fixture(
    scope="function",
    params=[
        estimator_xor_model.XORTrial,
        estimator_xor_model.XORTrialDataLayer,
        [utils.fixtures_path("estimator_xor_model_native.py")],
    ],
)
def xor_trial_controller(request):
    """
    This fixture will provide a function that takes a hyperparameters dictionary as input and
    returns a trial controller. It is parameterized over different implementations (both native
    and trial), so that any test that uses it may test a full set of implementations.
    """
    if isinstance(request.param, list):

        def _xor_trial_controller(
            hparams: Dict[str, Any],
            workloads: workload.Stream,
            batches_per_step: int = 1,
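# A minimal sketch of what the non-list (trial-class) branch of this fixture
# might return, assuming it wraps the same helper seen in the keras checkpoint
# test earlier in this section; an illustration, not the file's actual code.
def _controller_from_trial_class_example(request, hparams, workloads):
    return utils.make_trial_controller_from_trial_implementation(
        request.param, hparams, workloads
    )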
def test_checkpoint_loading(ckpt_ver):
    checkpoint_dir = os.path.join(
        utils.fixtures_path("ancient-checkpoints"), f"{ckpt_ver}-estimator"
    )
    estm = estimator.load_estimator_from_checkpoint_path(checkpoint_dir)
    assert isinstance(estm, tracking.AutoTrackable), type(estm)