def test_experiment_group_should_stop_early(self):
    """should_stop_early() tracks the current metrics: it becomes True while
    any experiment's metric satisfies a stopping policy and reverts to False
    only once no remaining metric does."""
    # Creating the group schedules the random search task; intercept it.
    with patch('runner.hp_search.random.hp_random_search_start.apply_async') as mock_start:
        group = ExperimentGroupFactory(
            content=experiment_group_spec_content_early_stopping)

    assert mock_start.call_count == 1
    assert group.should_stop_early() is False
    assert group.pending_experiments.count() == 2

    # Attach a triggering metric to the first experiment.
    first_exp, second_exp = list(group.experiments.all())
    precision_metric = ExperimentMetric.objects.create(
        experiment=first_exp, values={'precision': 0.99})
    assert group.should_stop_early() is True

    # A metric on the second experiment keeps the decision True as well.
    loss_metric = ExperimentMetric.objects.create(
        experiment=second_exp, values={'loss': 0.01})
    assert group.should_stop_early() is True

    # Removing the first metric alone is not enough to lift the decision.
    precision_metric.delete()
    assert group.should_stop_early() is True

    # With both metrics gone, no stopping condition holds any more.
    loss_metric.delete()
    assert group.should_stop_early() is False
def test_experiment_group_should_stop_early(self):
    """Early stopping flips to True while a metric meets a stopping policy
    and returns to False once every such metric has been removed."""
    # Group creation kicks off the random-search task; stub it out.
    with patch('hpsearch.tasks.random.hp_random_search_start.apply_async') as mock_start:
        group = ExperimentGroupFactory(
            content=experiment_group_spec_content_early_stopping)

    assert mock_start.call_count == 1
    assert group.should_stop_early() is False
    assert group.pending_experiments.count() == 2

    exp_a, exp_b = list(group.experiments.all())

    # One triggering metric is sufficient to stop early.
    metric_a = ExperimentMetric.objects.create(
        experiment=exp_a, values={'precision': 0.99})
    assert group.should_stop_early() is True

    # Adding a second metric does not change the outcome.
    metric_b = ExperimentMetric.objects.create(
        experiment=exp_b, values={'loss': 0.01})
    assert group.should_stop_early() is True

    # The decision stays True while the other metric remains.
    metric_a.delete()
    assert group.should_stop_early() is True

    # Once both metrics are gone the group no longer stops early.
    metric_b.delete()
    assert group.should_stop_early() is False
def test_should_stop_early(self):
    """A group without an early-stopping policy never stops early; one with
    a policy stops only after a metric crosses the configured threshold."""
    # No early-stopping configuration at all.
    group = ExperimentGroupFactory()
    assert group.should_stop_early() is False

    # Group configured to stop once precision exceeds 0.9.
    group = ExperimentGroupFactory(
        content=None,
        params={
            'concurrency': 2,
            'random_search': {'n_experiments': 10},
            'early_stopping': [
                {'metric': 'precision', 'value': 0.9, 'optimization': 'maximize'}
            ],
            'matrix': {'lr': {'values': [1, 2, 3]}}
        })
    assert group.should_stop_early() is False

    # A metric below the threshold must not trigger stopping.
    members = [ExperimentFactory(experiment_group=group) for _ in range(2)]
    ExperimentMetric.objects.create(experiment=members[0],
                                    values={'precision': 0.8})
    assert group.should_stop_early() is False

    # Crossing the threshold triggers early stopping.
    ExperimentMetric.objects.create(experiment=members[0],
                                    values={'precision': 0.91})
    assert group.should_stop_early() is True
def test_should_stop_early(self, _):
    """Without an early-stopping policy the group never stops early; with
    one, it stops only once a metric passes the configured threshold."""
    # Default group: no early-stopping section in its config.
    group = ExperimentGroupFactory()
    assert group.should_stop_early() is False

    # Group whose hptuning config stops when precision exceeds 0.9.
    group = ExperimentGroupFactory(
        content=None,
        hptuning={
            'concurrency': 2,
            'random_search': {'n_experiments': 10},
            'early_stopping': [
                {'metric': 'precision', 'value': 0.9, 'optimization': 'maximize'}
            ],
            'matrix': {'lr': {'values': [1, 2, 3]}}
        })
    assert group.should_stop_early() is False

    # Sub-threshold metric: decision stays False.
    members = [ExperimentFactory(experiment_group=group) for _ in range(2)]
    ExperimentMetric.objects.create(experiment=members[0],
                                    values={'precision': 0.8})
    assert group.should_stop_early() is False

    # Threshold crossed: the group should now stop early.
    ExperimentMetric.objects.create(experiment=members[0],
                                    values={'precision': 0.91})
    assert group.should_stop_early() is True
def test_experiment_group_should_stop_early(self, create_build_job):
    """Early stopping turns True while metrics satisfy a policy and — because
    the last recorded values persist on the experiment — stays True even
    after the metric rows are deleted, until fresh non-triggering metrics
    arrive."""
    # Provide a succeeded build so experiment scheduling can proceed.
    build = BuildJobFactory()
    BuildJobStatus.objects.create(status=JobLifeCycle.SUCCEEDED, job=build)
    create_build_job.return_value = build, True, True

    # Group creation launches the random search; capture the task call.
    with patch('hpsearch.tasks.random.hp_random_search_start.apply_async') as mock_start:
        group = ExperimentGroupFactory(
            content=experiment_group_spec_content_early_stopping)

    assert mock_start.call_count == 2
    assert group.should_stop_early() is False
    assert group.pending_experiments.count() == 2
    assert group.iteration_config.num_suggestions == 2

    exp_a, exp_b = list(group.experiments.all())

    # A triggering precision metric flips the decision to True.
    metric_a = ExperimentMetric.objects.create(
        experiment=exp_a, values={'precision': 0.99})
    assert group.should_stop_early() is True

    # An additional loss metric leaves the decision unchanged.
    metric_b = ExperimentMetric.objects.create(
        experiment=exp_b, values={'loss': 0.01})
    assert group.should_stop_early() is True

    # Deleting the metric rows does not reset last_metric on the
    # experiments, so the decision is still True.
    metric_a.delete()
    metric_b.delete()
    assert group.should_stop_early() is True  # last_metric still has the last values

    # New metrics that satisfy no stopping policy clear the decision.
    ExperimentMetric.objects.create(experiment=exp_a,
                                    values={'precision': 0.8})
    ExperimentMetric.objects.create(experiment=exp_b,
                                    values={'loss': 0.2})
    assert group.should_stop_early() is False