def test__plane_generator_from_aggregator(masked_imaging_7x7, samples, model):
    """Run a mock fit, load it into an aggregator database, and check the
    `Plane` generator reconstructs the galaxies of the maximum-likelihood model."""
    path_prefix = "aggregator_plane_gen"

    database_file = path.join(conf.instance.output_path, "plane.sqlite")
    result_path = path.join(conf.instance.output_path, path_prefix)

    # Remove leftovers from earlier runs so the database is built fresh.
    clean(database_file=database_file, result_path=result_path)

    search = mock.MockSearch(samples=samples)
    search.paths = af.DirectoryPaths(path_prefix=path_prefix)

    analysis = ag.AnalysisImaging(dataset=masked_imaging_7x7)
    search.fit(model=model, analysis=analysis)

    agg = af.Aggregator.from_database(filename=database_file)
    agg.add_directory(directory=result_path)

    plane_gen = ag.agg.Plane(aggregator=agg)

    for plane in plane_gen:
        assert plane.galaxies[0].redshift == 0.5
        assert plane.galaxies[0].light.centre == (0.0, 1.0)
        assert plane.galaxies[1].redshift == 1.0

    clean(database_file=database_file, result_path=result_path)
def test_identifier_file(model):
    """After saving a search's state, a `.identifier` marker file should exist
    in the output directory named after the paths' identifier."""
    paths = af.DirectoryPaths()
    paths.model = model
    paths.search = af.DynestyStatic()

    paths.save_all({}, {}, [])

    assert os.path.exists(output_path / paths.identifier / ".identifier")
def test__samples_from_model(self): emcee = af.Emcee() emcee.paths = af.DirectoryPaths( path_prefix=path.join("non_linear", "emcee")) emcee.paths._identifier = "tag" model = af.ModelMapper(mock_class=af.m.MockClassx4) model.mock_class.two = af.LogUniformPrior(lower_limit=1e-8, upper_limit=10.0) samples = emcee.samples_from(model=model) assert isinstance(samples.parameter_lists, list) assert isinstance(samples.parameter_lists[0], list) assert isinstance(samples.log_likelihood_list, list) assert isinstance(samples.log_prior_list, list) assert isinstance(samples.log_posterior_list, list) assert isinstance(samples.weight_list, list) assert samples.parameter_lists[0] == pytest.approx( [0.173670, 0.162607, 3095.28, 0.62104], 1.0e-4) assert samples.log_likelihood_list[0] == pytest.approx( -17257775239.32677, 1.0e-4) assert samples.log_prior_list[0] == pytest.approx( 1.6102016075510708, 1.0e-4) assert samples.weight_list[0] == pytest.approx(1.0, 1.0e-4) assert samples.total_steps == 1000 assert samples.total_walkers == 10 assert samples.auto_correlations.times[0] == pytest.approx( 31.98507, 1.0e-4)
def test_non_grid_searched_dimensions(self, mapper): search = af.m.MockSearch() search.paths = af.DirectoryPaths(name="") grid_search = af.SearchGridSearch( number_of_steps=10, search=search ) mappers = list( grid_search.model_mappers( mapper, grid_priors=[mapper.component.one_tuple.one_tuple_0] ) ) assert len(mappers) == 10 assert mappers[0].component.one_tuple.one_tuple_0.lower_limit == 0.0 assert mappers[0].component.one_tuple.one_tuple_0.upper_limit == 0.1 assert mappers[0].component.one_tuple.one_tuple_1.lower_limit == 0.0 assert mappers[0].component.one_tuple.one_tuple_1.upper_limit == 2.0 assert mappers[-1].component.one_tuple.one_tuple_0.lower_limit == 0.9 assert mappers[-1].component.one_tuple.one_tuple_0.upper_limit == 1.0 assert mappers[-1].component.one_tuple.one_tuple_1.lower_limit == 0.0 assert mappers[-1].component.one_tuple.one_tuple_1.upper_limit == 2.0
def test_results(self, grid_search_05, mapper): result = grid_search_05.fit( model=mapper, analysis=af.m.MockAnalysis(), grid_priors=[ mapper.component.one_tuple.one_tuple_0, mapper.component.one_tuple.one_tuple_1, ], ) assert len(result.results) == 4 assert result.no_dimensions == 2 grid_search = af.SearchGridSearch( search=af.m.MockOptimizer(), number_of_steps=10, ) grid_search.search.paths = af.DirectoryPaths(name="sample_name") result = grid_search.fit( model=mapper, analysis=af.m.MockAnalysis(), grid_priors=[ mapper.component.one_tuple.one_tuple_0, mapper.component.one_tuple.one_tuple_1, ], ) assert len(result.results) == 100 assert result.no_dimensions == 2 assert result.log_likelihoods_native.shape == (10, 10)
def test__samples_from_model(self): pyswarms = af.PySwarmsGlobal() pyswarms.paths = af.DirectoryPaths( path_prefix=path.join("non_linear", "pyswarms")) pyswarms.paths._identifier = "tag" model = af.ModelMapper(mock_class=af.m.MockClassx3) model.mock_class.one = af.LogUniformPrior(lower_limit=1e-8, upper_limit=100.0) model.mock_class.two = af.LogUniformPrior(lower_limit=1e-8, upper_limit=100.0) model.mock_class.three = af.LogUniformPrior(lower_limit=1e-8, upper_limit=100.0) # model.mock_class.four = af.LogUniformPrior(lower_limit=1e-8, upper_limit=100.0) samples = pyswarms.samples_from(model=model) assert isinstance(samples.parameter_lists, list) assert isinstance(samples.parameter_lists[0], list) assert isinstance(samples.log_likelihood_list, list) assert isinstance(samples.log_prior_list, list) assert isinstance(samples.log_posterior_list, list) assert samples.parameter_lists[0] == pytest.approx( [50.1254, 1.04626, 10.09456], 1.0e-4) assert samples.log_likelihood_list[0] == pytest.approx( -5071.80777, 1.0e-4) assert samples.log_posterior_list[0] == pytest.approx( -5070.73298, 1.0e-4) assert samples.weight_list[0] == 1.0 assert len(samples.parameter_lists) == 500 assert len(samples.log_likelihood_list) == 500
def test__fit_imaging_generator_from_aggregator(masked_imaging_7x7, samples, model):
    """Run a mock imaging fit, load it via the aggregator database, and check
    the `FitImaging` generator reproduces the fitted image."""
    path_prefix = "aggregator_fit_imaging_gen"

    database_file = path.join(conf.instance.output_path, "fit_imaging.sqlite")
    result_path = path.join(conf.instance.output_path, path_prefix)

    # Start from a clean slate so stale results do not pollute the database.
    clean(database_file=database_file, result_path=result_path)

    search = mock.MockSearch(samples=samples)
    search.paths = af.DirectoryPaths(path_prefix=path_prefix)

    analysis = ag.AnalysisImaging(dataset=masked_imaging_7x7)
    search.fit(model=model, analysis=analysis)

    agg = af.Aggregator.from_database(filename=database_file)
    agg.add_directory(directory=result_path)

    fit_imaging_gen = ag.agg.FitImaging(aggregator=agg)

    for fit_imaging in fit_imaging_gen:
        assert (fit_imaging.image == masked_imaging_7x7.image).all()

    clean(database_file=database_file, result_path=result_path)
def test__samples_from_model(self): drawer = af.Drawer() drawer.paths = af.DirectoryPaths( path_prefix=path.join("non_linear", "drawer")) drawer.paths._identifier = "tag" model = af.ModelMapper(mock_class=af.m.MockClassx3) model.mock_class.one = af.LogUniformPrior(lower_limit=1e-8, upper_limit=100.0) model.mock_class.two = af.LogUniformPrior(lower_limit=1e-8, upper_limit=100.0) model.mock_class.three = af.LogUniformPrior(lower_limit=1e-8, upper_limit=100.0) samples = drawer.samples_from(model=model) assert isinstance(samples.parameter_lists, list) assert isinstance(samples.parameter_lists[0], list) assert isinstance(samples.log_likelihood_list, list) assert isinstance(samples.log_prior_list, list) assert isinstance(samples.log_posterior_list, list) assert samples.parameter_lists[0] == pytest.approx( [49.507679, 49.177471, 14.76753], 1.0e-4) assert samples.log_likelihood_list[0] == pytest.approx( -2763.925766, 1.0e-4) assert samples.log_posterior_list[0] == pytest.approx( -2763.817517, 1.0e-4) assert samples.weight_list[0] == 1.0 assert len(samples.parameter_lists) == 3 assert len(samples.log_likelihood_list) == 3
def test__fit_interferometer_generator_from_aggregator(
    interferometer_7, mask_2d_7x7, samples, model
):
    """Run a mock interferometer fit, load it via the aggregator database, and
    check the `FitInterferometer` generator carries the dataset through."""
    path_prefix = "aggregator_fit_interferometer"

    database_file = path.join(
        conf.instance.output_path, "fit_interferometer.sqlite"
    )
    result_path = path.join(conf.instance.output_path, path_prefix)

    # Remove any prior output so the aggregator database is rebuilt fresh.
    clean(database_file=database_file, result_path=result_path)

    search = mock.MockSearch(samples=samples)
    search.paths = af.DirectoryPaths(path_prefix=path_prefix)

    analysis = ag.AnalysisInterferometer(dataset=interferometer_7)
    search.fit(model=model, analysis=analysis)

    agg = af.Aggregator.from_database(filename=database_file)
    agg.add_directory(directory=result_path)

    fit_interferometer_gen = ag.agg.FitInterferometer(aggregator=agg)

    for fit_interferometer in fit_interferometer_gen:
        dataset = fit_interferometer.interferometer
        assert (dataset.visibilities == interferometer_7.visibilities).all()
        assert (dataset.real_space_mask == mask_2d_7x7).all()

    clean(database_file=database_file, result_path=result_path)
def test_serialize(model):
    """A `DirectoryPaths` with a model attached survives a pickle round-trip
    without losing the model."""
    paths = af.DirectoryPaths()
    paths.model = model

    restored = pickle.loads(pickle.dumps(paths))

    assert restored.model is not None
def test__samples_from_model(self): lbfgs = af.LBFGS() lbfgs.paths = af.DirectoryPaths(path_prefix=path.join("non_linear", "LBFGS")) lbfgs.paths._identifier = "tag" model = af.ModelMapper(mock_class=af.m.MockClassx3) model.mock_class.one = af.LogUniformPrior(lower_limit=1e-8, upper_limit=100.0) model.mock_class.two = af.LogUniformPrior(lower_limit=1e-8, upper_limit=100.0) model.mock_class.three = af.LogUniformPrior(lower_limit=1e-8, upper_limit=100.0) samples = lbfgs.samples_from(model=model) assert isinstance(samples.parameter_lists, list) assert isinstance(samples.parameter_lists[0], list) assert isinstance(samples.log_likelihood_list, list) assert isinstance(samples.log_prior_list, list) assert isinstance(samples.log_posterior_list, list) assert samples.parameter_lists[0] == pytest.approx( [50.005469, 25.143677, 10.06950], 1.0e-4 ) assert samples.log_likelihood_list[0] == pytest.approx(-45.134121, 1.0e-4) assert samples.log_posterior_list[0] == pytest.approx(-44.97504284, 1.0e-4) assert samples.weight_list[0] == 1.0 assert len(samples.parameter_lists) == 1 assert len(samples.log_likelihood_list) == 1
def test__fit_interferometer_all_above_weight_gen(self, interferometer_7, samples, model): path_prefix = "aggregator_fit_interferometer_gen" database_file = path.join(conf.instance.output_path, "fit_interferometer.sqlite") result_path = path.join(conf.instance.output_path, path_prefix) clean(database_file=database_file, result_path=result_path) search = mock.MockSearch(samples=samples, result=mock.MockResult(model=model, samples=samples)) search.paths = af.DirectoryPaths(path_prefix=path_prefix) analysis = al.AnalysisInterferometer(dataset=interferometer_7) search.fit(model=model, analysis=analysis) agg = af.Aggregator.from_database(filename=database_file) agg.add_directory(directory=result_path) fit_interferometer_agg = al.agg.FitInterferometerAgg(aggregator=agg) fit_interferometer_pdf_gen = fit_interferometer_agg.all_above_weight_gen( minimum_weight=-1.0) i = 0 for fit_interferometer_gen in fit_interferometer_pdf_gen: for fit_interferometer in fit_interferometer_gen: i += 1 if i == 1: assert fit_interferometer.tracer.galaxies[ 0].redshift == 0.5 assert fit_interferometer.tracer.galaxies[ 0].light.centre == ( 1.0, 1.0, ) assert fit_interferometer.tracer.galaxies[ 1].redshift == 1.0 if i == 2: assert fit_interferometer.tracer.galaxies[ 0].redshift == 0.5 assert fit_interferometer.tracer.galaxies[ 0].light.centre == ( 10.0, 10.0, ) assert fit_interferometer.tracer.galaxies[ 1].redshift == 1.0 assert i == 2 clean(database_file=database_file, result_path=result_path)
def test__tracer_all_above_weight_gen(self, masked_imaging_7x7, samples, model): path_prefix = "aggregator_tracer_gen" database_file = path.join(conf.instance.output_path, "tracer.sqlite") result_path = path.join(conf.instance.output_path, path_prefix) clean(database_file=database_file, result_path=result_path) search = mock.MockSearch(samples=samples, result=mock.MockResult(model=model, samples=samples)) search.paths = af.DirectoryPaths(path_prefix=path_prefix) analysis = al.AnalysisImaging(dataset=masked_imaging_7x7) search.fit(model=model, analysis=analysis) agg = af.Aggregator.from_database(filename=database_file) agg.add_directory(directory=result_path) tracer_agg = al.agg.TracerAgg(aggregator=agg) tracer_pdf_gen = tracer_agg.all_above_weight_gen(minimum_weight=-1.0) weight_pdf_gen = tracer_agg.weights_above_gen(minimum_weight=-1.0) i = 0 for (tracer_gen, weight_gen) in zip(tracer_pdf_gen, weight_pdf_gen): for tracer in tracer_gen: i += 1 if i == 1: assert tracer.galaxies[0].redshift == 0.5 assert tracer.galaxies[0].light.centre == (1.0, 1.0) assert tracer.galaxies[1].redshift == 1.0 if i == 2: assert tracer.galaxies[0].redshift == 0.5 assert tracer.galaxies[0].light.centre == (10.0, 10.0) assert tracer.galaxies[1].redshift == 1.0 for weight in weight_gen: if i == 0: assert weight == 0.0 if i == 1: assert weight == 1.0 assert i == 2 clean(database_file=database_file, result_path=result_path)
def test_visualise():
    """Visualising a summed analysis delegates to every child analysis."""
    analysis_1 = Analysis()
    analysis_2 = Analysis()

    combined = analysis_1 + analysis_2
    combined.visualize(af.DirectoryPaths(), None, None)

    assert analysis_1.did_visualise is True
    assert analysis_2.did_visualise is True
def test_identifier_file():
    """Accessing a paths' identifier should leave a `.identifier` marker file
    in the corresponding output directory."""
    paths = af.DirectoryPaths()
    paths.model = af.Model(af.Gaussian)
    paths.search = af.DynestyStatic()

    assert os.path.exists(output_path / paths.identifier / ".identifier")
def test__profile_log_likelihood():
    """Profiling a summed analysis delegates to every child analysis."""
    analysis_1 = Analysis()
    analysis_2 = Analysis()

    combined = analysis_1 + analysis_2
    combined.profile_log_likelihood_function(af.DirectoryPaths(), None)

    assert analysis_1.did_profile is True
    assert analysis_2.did_profile is True
def test__interferometer_generator_from_aggregator(
    visibilities_7,
    visibilities_noise_map_7,
    uv_wavelengths_7x2,
    mask_2d_7x7,
    samples,
    model,
):
    """Fit a settings-customised interferometer dataset, load it via the
    aggregator, and check the generator restores data, grids and transformer."""
    path_prefix = "aggregator_interferometer"

    database_file = path.join(conf.instance.output_path, "interferometer.sqlite")
    result_path = path.join(conf.instance.output_path, path_prefix)

    clean(database_file=database_file, result_path=result_path)

    # Non-default settings are used so we can verify they round-trip through
    # the database.
    interferometer_7 = ag.Interferometer(
        visibilities=visibilities_7,
        noise_map=visibilities_noise_map_7,
        uv_wavelengths=uv_wavelengths_7x2,
        real_space_mask=mask_2d_7x7,
        settings=ag.SettingsInterferometer(
            grid_class=ag.Grid2DIterate,
            grid_inversion_class=ag.Grid2DIterate,
            fractional_accuracy=0.5,
            sub_steps=[2],
            transformer_class=ag.TransformerDFT,
        ),
    )

    search = mock.MockSearch(samples=samples)
    search.paths = af.DirectoryPaths(path_prefix=path_prefix)

    analysis = ag.AnalysisInterferometer(dataset=interferometer_7)
    search.fit(model=model, analysis=analysis)

    agg = af.Aggregator.from_database(filename=database_file)
    agg.add_directory(directory=result_path)

    interferometer_agg = ag.agg.InterferometerAgg(aggregator=agg)
    interferometer_gen = interferometer_agg.interferometer_gen()

    for interferometer in interferometer_gen:
        assert (
            interferometer.visibilities == interferometer_7.visibilities
        ).all()
        assert (interferometer.real_space_mask == mask_2d_7x7).all()

        assert isinstance(interferometer.grid, ag.Grid2DIterate)
        assert isinstance(interferometer.grid_inversion, ag.Grid2DIterate)
        assert interferometer.grid.sub_steps == [2]
        assert interferometer.grid.fractional_accuracy == 0.5
        assert isinstance(interferometer.transformer, ag.TransformerDFT)

    clean(database_file=database_file, result_path=result_path)
def test__test_mode_parallel_profile_outputs_prof_files():
    """Construct a sneaky process with profiling paths; running it is disabled
    until the infinite-loop issue below is resolved."""
    paths = af.DirectoryPaths(
        path_prefix=path.join("non_linear", "parallel"),
    )

    process = MockSneakyProcess(paths=paths)

    # TODO : I dont know how to make it so run doesn't end up in an infinite loop?
    # process.run()
def test_passes_attributes(self): grid_search = af.SearchGridSearch(number_of_steps=10, search=af.DynestyStatic()) grid_search.paths = af.DirectoryPaths(name="") grid_search.nlive = 20 grid_search.facc = 0.3 search = grid_search.search_instance("name_path") assert search.nlive is grid_search.nlive assert grid_search.paths.path != search.paths.path assert grid_search.paths.output_path != search.paths.output_path
def test_is_flat():
    """With `is_flat=True`, nesting sub-directory paths always resolves the
    parent to the original top-level paths object."""
    paths = af.DirectoryPaths()

    subdirectory_path = SubDirectoryPaths(
        parent=paths, analysis_name="name", is_flat=True
    )
    assert subdirectory_path.parent is paths

    # Nesting a second level still points straight back at the root.
    subdirectory_path = SubDirectoryPaths(
        parent=subdirectory_path,
        analysis_name="name",
        is_flat=True,
    )
    assert subdirectory_path.parent is paths
def test__samples_from_model(self): # Setup pickle of mock Dynesty sampler that the samples_from_model function uses. results = MockDynestyResults( samples=np.array([[1.0, 2.0, 3.0, 5.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0]]), logl=[1.0, 2.0, 3.0], logwt=[np.log(1.0), np.log(2.0), np.log(3.0)], ncall=[5.0, 5.0], logz=[-2.0, -1.0, 0.0], nlive=3, ) sampler = MockDynestySampler(results=results) paths = af.DirectoryPaths( path_prefix=path.join("non_linear", "dynesty")) paths._identifier = "tag" dynesty = af.DynestyStatic(nlive=3) dynesty.paths = paths with open(path.join(dynesty.paths.samples_path, "dynesty.pickle"), "wb") as f: dill.dump(sampler, f) model = af.ModelMapper(mock_class=mock.MockClassx4) model.mock_class.two = af.LogUniformPrior(lower_limit=1e-8, upper_limit=10.0) samples = dynesty.samples_from(model=model) assert isinstance(samples.parameter_lists, list) assert isinstance(samples.parameter_lists[0], list) assert isinstance(samples.log_likelihood_list, list) assert isinstance(samples.log_prior_list, list) assert isinstance(samples.log_posterior_list, list) assert isinstance(samples.weight_list, list) assert samples.parameter_lists == [ [1.0, 2.0, 3.0, 5.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], ] assert samples.log_likelihood_list == [1.0, 2.0, 3.0] assert samples.log_prior_list == [0.2, 0.25, 0.25] assert samples.weight_list == pytest.approx([1.0, 2.0, 3.0], 1.0e-4) assert samples.total_samples == 10 assert samples.log_evidence == 0.0 assert samples.number_live_points == 3
def test__median_pdf_parameters(self): emcee = af.Emcee() emcee.paths = af.DirectoryPaths( path_prefix=path.join("non_linear", "emcee")) emcee.paths._identifier = "tag" model = af.ModelMapper(mock_class=af.m.MockClassx4) model.mock_class.two = af.LogUniformPrior(lower_limit=1e-8, upper_limit=10.0) samples = emcee.samples_from(model=model) assert samples.median_pdf_vector == pytest.approx( [0.008422, -0.026413, 9.9579656, 0.494618], 1.0e-3)
def test__modify_before_fit__inversion_no_positions_likelihood__raises_exception(
    masked_imaging_7x7,
):
    """A model with a pixelized source must be paired with a positions
    likelihood; without one `modify_before_fit` raises, with one it passes."""
    lens = al.Galaxy(redshift=0.5, mass=al.mp.SphIsothermal())
    source = al.Galaxy(
        redshift=1.0,
        pixelization=al.pix.Rectangular,
        regularization=al.reg.Constant(),
    )
    model = af.Collection(galaxies=af.Collection(lens=lens, source=source))

    # No positions likelihood supplied -> inversion setup must be rejected.
    analysis = al.AnalysisImaging(dataset=masked_imaging_7x7)

    with pytest.raises(exc.AnalysisException):
        analysis.modify_before_fit(paths=af.DirectoryPaths(), model=model)

    positions_likelihood = al.PositionsLHPenalty(
        positions=al.Grid2DIrregular([(1.0, 100.0), (200.0, 2.0)]),
        threshold=0.01,
    )

    # With a positions likelihood the same call succeeds.
    analysis = al.AnalysisImaging(
        dataset=masked_imaging_7x7, positions_likelihood=positions_likelihood
    )
    analysis.modify_before_fit(paths=af.DirectoryPaths(), model=model)
def test__log_evidence_from_file_summary(self, multi_nest_summary_path): multi_nest = af.MultiNest() multi_nest.paths = af.DirectoryPaths( path_prefix=path.join("non_linear", "multinest")) create_summary_4_parameters(file_path=multi_nest.paths.samples_path) log_evidence = mn.log_evidence_from_file_summary( file_summary=path.join(multi_nest.paths.samples_path, "multinestsummary.txt"), prior_count=4, ) assert log_evidence == 0.02
def test__autocorrelation_times(self): emcee = af.Emcee() emcee.paths = af.DirectoryPaths( path_prefix=path.join("non_linear", "emcee")) emcee.paths._identifier = "tag" model = af.ModelMapper(mock_class=af.m.MockClassx4) model.mock_class.two = af.LogUniformPrior(lower_limit=1e-8, upper_limit=10.0) samples = emcee.samples_from(model=model) assert samples.auto_correlations.previous_times == pytest.approx( [31.1079, 36.0910, 72.44768, 65.86194], 1.0e-4) assert samples.auto_correlations.times == pytest.approx( [31.98507, 36.51001, 73.47629, 67.67495], 1.0e-4)
def test__samples_from_model(self, multi_nest_samples_path, multi_nest_resume_path, multi_nest_summary_path): multi_nest = af.MultiNest() multi_nest.paths = af.DirectoryPaths( path_prefix=path.join("non_linear", "multinest")) create_weighted_samples_4_parameters( file_path=multi_nest.paths.samples_path) create_resume(file_path=multi_nest.paths.samples_path) create_summary_4_parameters(file_path=multi_nest.paths.samples_path) model = af.ModelMapper(mock_class=mock.MockClassx4) model.mock_class.two = af.LogUniformPrior(lower_limit=1e-8, upper_limit=10.0) samples = multi_nest.samples_from(model=model) assert samples.parameter_lists == [ [1.1, 2.1, 3.1, 4.1], [0.9, 1.9, 2.9, 3.9], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], ] value = -0.5 * 9999999.9 assert samples.log_likelihood_list == 10 * [value] assert samples.log_prior_list == pytest.approx([ 0.243902, 0.256410, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25 ], 1.0e-4) assert samples.weight_list == [ 0.02, 0.02, 0.01, 0.05, 0.1, 0.1, 0.1, 0.1, 0.2, 0.3 ] assert samples.total_samples == 12345 assert samples.log_evidence == 0.02 assert samples.number_live_points == 50
def test__read_quantities_from_weighted_samples_file( self, multi_nest_samples_path): multi_nest = af.MultiNest() multi_nest.paths = af.DirectoryPaths( path_prefix=path.join("non_linear", "multinest")) create_weighted_samples_4_parameters(file_path=multi_nest.paths.path) parameters = mn.parameters_from_file_weighted_samples( file_weighted_samples=path.join(multi_nest.paths.path, "multinest.txt"), prior_count=4, ) assert parameters == [ [1.1, 2.1, 3.1, 4.1], [0.9, 1.9, 2.9, 3.9], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], ] log_likelihood_list = mn.log_likelihood_list_from_file_weighted_samples( file_weighted_samples=path.join(multi_nest.paths.path, "multinest.txt")) value = -0.5 * 9999999.9 assert log_likelihood_list == 10 * [value] weight_list = mn.weight_list_from_file_weighted_samples( file_weighted_samples=path.join(multi_nest.paths.path, "multinest.txt")) assert weight_list == [ 0.02, 0.02, 0.01, 0.05, 0.1, 0.1, 0.1, 0.1, 0.2, 0.3 ]
def test__vector_at_sigma__uses_output_files(self): emcee = af.Emcee() emcee.paths = af.DirectoryPaths( path_prefix=path.join("non_linear", "emcee")) emcee.paths._identifier = "tag" model = af.ModelMapper(mock_class=af.m.MockClassx4) model.mock_class.two = af.LogUniformPrior(lower_limit=1e-8, upper_limit=10.0) samples = emcee.samples_from(model=model) parameters = samples.vector_at_sigma(sigma=3.0) assert parameters[0][0:2] == pytest.approx((-0.003197, 0.019923), 1e-2) parameters = samples.vector_at_sigma(sigma=1.0) assert parameters[0][0:2] == pytest.approx((0.0042278, 0.01087681), 1e-2)
def test__imaging_generator_from_aggregator(imaging_7x7, mask_2d_7x7, samples, model):
    """Fit a settings-customised imaging dataset, load it via the aggregator,
    and check the generator restores the image and iterate-grid settings."""
    path_prefix = "aggregator_imaging_gen"

    database_file = path.join(conf.instance.output_path, "imaging.sqlite")
    result_path = path.join(conf.instance.output_path, path_prefix)

    clean(database_file=database_file, result_path=result_path)

    # Apply the mask first, then non-default settings, so both round-trip
    # through the database.
    masked_imaging_7x7 = imaging_7x7.apply_mask(mask=mask_2d_7x7)
    masked_imaging_7x7 = masked_imaging_7x7.apply_settings(
        settings=ag.SettingsImaging(
            grid_class=ag.Grid2DIterate,
            grid_inversion_class=ag.Grid2DIterate,
            fractional_accuracy=0.5,
            sub_steps=[2],
        )
    )

    search = mock.MockSearch(samples=samples)
    search.paths = af.DirectoryPaths(path_prefix=path_prefix)

    analysis = ag.AnalysisImaging(dataset=masked_imaging_7x7)
    search.fit(model=model, analysis=analysis)

    agg = af.Aggregator.from_database(filename=database_file)
    agg.add_directory(directory=result_path)

    imaging_agg = ag.agg.ImagingAgg(aggregator=agg)
    imaging_gen = imaging_agg.imaging_gen()

    for imaging in imaging_gen:
        assert (imaging.image == masked_imaging_7x7.image).all()
        assert isinstance(imaging.grid, ag.Grid2DIterate)
        assert isinstance(imaging.grid_inversion, ag.Grid2DIterate)
        assert imaging.grid.sub_steps == [2]
        assert imaging.grid.fractional_accuracy == 0.5

    clean(database_file=database_file, result_path=result_path)
def test__fit_imaging_randomly_drawn_via_pdf_gen_from( self, masked_imaging_7x7, samples, model): path_prefix = "aggregator_fit_imaging_gen" database_file = path.join(conf.instance.output_path, "fit_imaging.sqlite") result_path = path.join(conf.instance.output_path, path_prefix) clean(database_file=database_file, result_path=result_path) search = MockSearch(samples=samples, result=MockResult(model=model, samples=samples)) search.paths = af.DirectoryPaths(path_prefix=path_prefix) analysis = al.AnalysisImaging(dataset=masked_imaging_7x7) search.fit(model=model, analysis=analysis) agg = af.Aggregator.from_database(filename=database_file) agg.add_directory(directory=result_path) fit_imaging_agg = al.agg.FitImagingAgg(aggregator=agg) fit_imaging_pdf_gen = fit_imaging_agg.randomly_drawn_via_pdf_gen_from( total_samples=2) i = 0 for fit_imaging_gen in fit_imaging_pdf_gen: for fit_imaging in fit_imaging_gen: i += 1 assert fit_imaging.tracer.galaxies[0].redshift == 0.5 assert fit_imaging.tracer.galaxies[0].light.centre == (10.0, 10.0) assert fit_imaging.tracer.galaxies[1].redshift == 1.0 assert i == 2 clean(database_file=database_file, result_path=result_path)