def make_dataset():
    """Return a minimal `PointSourceDataset` fixture with one position and one flux.

    Passes `name` as a keyword for consistency with `make_point_source_dataset`,
    which also constructs the dataset with keyword arguments only.
    """
    return al.PointSourceDataset(
        name="name",
        positions=al.Grid2DIrregular([(1, 2)]),
        positions_noise_map=al.ValuesIrregular([1]),
        fluxes=al.ValuesIrregular([2]),
        fluxes_noise_map=al.ValuesIrregular([3]),
    )
# Example 2
def make_point_source_dataset():
    """Assemble the standard two-point `PointSourceDataset` fixture from its component fixtures."""
    dataset_kwargs = dict(
        name="point_0",
        positions=make_positions_x2(),
        positions_noise_map=make_positions_noise_map_x2(),
        fluxes=make_fluxes_x2(),
        fluxes_noise_map=make_fluxes_noise_map_x2(),
    )
    return al.PointSourceDataset(**dataset_kwargs)
    def test__figure_of_merit__matches_correct_fit_given_galaxy_profiles(
            self, positions_x2, positions_x2_noise_map):
        """The analysis log likelihood must equal that of an equivalent `FitPositionsImage`."""

        dataset = al.PointSourceDataset(
            name="point_0",
            positions=positions_x2,
            positions_noise_map=positions_x2_noise_map,
        )
        dataset_dict = al.PointSourceDict(point_source_dataset_list=[dataset])

        model = af.Collection(
            galaxies=af.Collection(
                lens=al.Galaxy(
                    redshift=0.5, point_0=al.ps.PointSource(centre=(0.0, 0.0))
                )
            )
        )
        # An empty unit vector yields the model's fixed instance (no free parameters).
        instance = model.instance_from_unit_vector([])

        # Case 1: the solver reproduces the observed positions exactly -> zero chi-squared.
        perfect_solver = mock.MockPositionsSolver(model_positions=positions_x2)
        analysis = al.AnalysisPointSource(
            point_source_dict=dataset_dict, solver=perfect_solver
        )
        analysis_log_likelihood = analysis.log_likelihood_function(instance=instance)
        tracer = analysis.tracer_for_instance(instance=instance)

        fit = al.FitPositionsImage(
            name="point_0",
            positions=positions_x2,
            noise_map=positions_x2_noise_map,
            tracer=tracer,
            positions_solver=perfect_solver,
        )
        assert fit.chi_squared == 0.0
        assert fit.log_likelihood == analysis_log_likelihood

        # Case 2: the solver offsets every model position by 1.0 -> unit residuals.
        offset_positions = al.Grid2DIrregular([(0.0, 1.0), (1.0, 2.0)])
        offset_solver = mock.MockPositionsSolver(model_positions=offset_positions)
        analysis = al.AnalysisPointSource(
            point_source_dict=dataset_dict, solver=offset_solver
        )
        analysis_log_likelihood = analysis.log_likelihood_function(instance=instance)

        fit = al.FitPositionsImage(
            name="point_0",
            positions=positions_x2,
            noise_map=positions_x2_noise_map,
            tracer=tracer,
            positions_solver=offset_solver,
        )
        assert fit.residual_map.in_list == [1.0, 1.0]
        assert fit.chi_squared == 2.0
        assert fit.log_likelihood == analysis_log_likelihood
    def test__figure_of_merit__includes_fit_fluxes(self, positions_x2,
                                                   positions_x2_noise_map,
                                                   fluxes_x2,
                                                   fluxes_x2_noise_map):
        """When fluxes are present, the analysis figure of merit must be the sum of the
        positions fit and fluxes fit log likelihoods."""

        dataset = al.PointSourceDataset(
            name="point_0",
            positions=positions_x2,
            positions_noise_map=positions_x2_noise_map,
            fluxes=fluxes_x2,
            fluxes_noise_map=fluxes_x2_noise_map,
        )
        dataset_dict = al.PointSourceDict(point_source_dataset_list=[dataset])

        lens_galaxy = al.Galaxy(
            redshift=0.5,
            sis=al.mp.SphIsothermal(einstein_radius=1.0),
            point_0=al.ps.PointSourceFlux(flux=1.0),
        )
        model = af.Collection(galaxies=af.Collection(lens=lens_galaxy))

        # Case 1: the solver reproduces the observed positions exactly.
        perfect_solver = mock.MockPositionsSolver(model_positions=positions_x2)
        analysis = al.AnalysisPointSource(
            point_source_dict=dataset_dict, solver=perfect_solver
        )
        instance = model.instance_from_unit_vector([])
        analysis_log_likelihood = analysis.log_likelihood_function(instance=instance)
        tracer = analysis.tracer_for_instance(instance=instance)

        fit_positions = al.FitPositionsImage(
            name="point_0",
            positions=positions_x2,
            noise_map=positions_x2_noise_map,
            tracer=tracer,
            positions_solver=perfect_solver,
        )
        fit_fluxes = al.FitFluxes(
            name="point_0",
            fluxes=fluxes_x2,
            noise_map=fluxes_x2_noise_map,
            positions=positions_x2,
            tracer=tracer,
        )
        assert (fit_positions.log_likelihood +
                fit_fluxes.log_likelihood == analysis_log_likelihood)

        # Case 2: the solver offsets every model position by 1.0 -> unit residuals.
        offset_solver = mock.MockPositionsSolver(
            model_positions=al.Grid2DIrregular([(0.0, 1.0), (1.0, 2.0)])
        )
        analysis = al.AnalysisPointSource(
            point_source_dict=dataset_dict, solver=offset_solver
        )
        instance = model.instance_from_unit_vector([])
        analysis_log_likelihood = analysis.log_likelihood_function(instance=instance)

        fit_positions = al.FitPositionsImage(
            name="point_0",
            positions=positions_x2,
            noise_map=positions_x2_noise_map,
            tracer=tracer,
            positions_solver=offset_solver,
        )
        fit_fluxes = al.FitFluxes(
            name="point_0",
            fluxes=fluxes_x2,
            noise_map=fluxes_x2_noise_map,
            positions=positions_x2,
            tracer=tracer,
        )
        assert fit_positions.residual_map.in_list == [1.0, 1.0]
        assert fit_positions.chi_squared == 2.0
        assert (fit_positions.log_likelihood +
                fit_fluxes.log_likelihood == analysis_log_likelihood)
# Output a subplot of the tracer as a .png image into the dataset folder.
# NOTE(review): relies on `tracer`, `grid` and `dataset_path` defined earlier in the script.
mat_plot_2d = aplt.MatPlot2D(
    output=aplt.Output(path=dataset_path, format="png"))

tracer_plotter = aplt.TracerPlotter(tracer=tracer,
                                    grid=grid,
                                    mat_plot_2d=mat_plot_2d)
tracer_plotter.subplot_tracer()
"""
Create a point-source dictionary data object and output this to a `.json` file, which is the format used to load and
analyse the dataset.
"""
# Bundle the simulated positions and fluxes into a dataset, with noise-maps attached.
point_source_dataset = al.PointSourceDataset(
    name="point_0",
    positions=positions,
    # One noise value per position, set to the grid's pixel scale.
    positions_noise_map=positions.values_from_value(value=grid.pixel_scale),
    fluxes=fluxes,
    fluxes_noise_map=al.ValuesIrregular(values=[1.0, 1.0, 1.0, 1.0]),
)

point_source_dict = al.PointSourceDict(
    point_source_dataset_list=[point_source_dataset])

# Serialize the dict to the .json file that model-fit scripts load.
point_source_dict.output_to_json(file_path=path.join(dataset_path,
                                                     "point_source_dict.json"),
                                 overwrite=True)
"""
Pickle the `Tracer` in the dataset folder, ensuring the true `Tracer` is safely stored and available if we need to 
check how the dataset was simulated in the future. 

This will also be accessible via the `Aggregator` if a model-fit is performed using the dataset.