    def test_results(self, grid_search_05, mapper):
        result = grid_search_05.fit(
            model=mapper,
            analysis=af.m.MockAnalysis(),
            grid_priors=[
                mapper.component.one_tuple.one_tuple_0,
                mapper.component.one_tuple.one_tuple_1,
            ],
        )

        assert len(result.results) == 4
        assert result.no_dimensions == 2

        grid_search = af.SearchGridSearch(
            search=af.m.MockOptimizer(),
            number_of_steps=10,
        )
        grid_search.search.paths = af.DirectoryPaths(name="sample_name")
        result = grid_search.fit(
            model=mapper,
            analysis=af.m.MockAnalysis(),
            grid_priors=[
                mapper.component.one_tuple.one_tuple_0,
                mapper.component.one_tuple.one_tuple_1,
            ],
        )

        assert len(result.results) == 100
        assert result.no_dimensions == 2
        assert result.log_likelihoods_native.shape == (10, 10)

    def test_non_grid_searched_dimensions(self, mapper):
        search = af.m.MockSearch()
        search.paths = af.DirectoryPaths(name="")
        grid_search = af.SearchGridSearch(
            number_of_steps=10,
            search=search
        )

        mappers = list(
            grid_search.model_mappers(
                mapper, grid_priors=[mapper.component.one_tuple.one_tuple_0]
            )
        )

        assert len(mappers) == 10

        assert mappers[0].component.one_tuple.one_tuple_0.lower_limit == 0.0
        assert mappers[0].component.one_tuple.one_tuple_0.upper_limit == 0.1
        assert mappers[0].component.one_tuple.one_tuple_1.lower_limit == 0.0
        assert mappers[0].component.one_tuple.one_tuple_1.upper_limit == 2.0

        assert mappers[-1].component.one_tuple.one_tuple_0.lower_limit == 0.9
        assert mappers[-1].component.one_tuple.one_tuple_0.upper_limit == 1.0
        assert mappers[-1].component.one_tuple.one_tuple_1.lower_limit == 0.0
        assert mappers[-1].component.one_tuple.one_tuple_1.upper_limit == 2.0
Example #3
def test_serialize_grid_search(optimizer):
    grid_search = af.SearchGridSearch(optimizer)
    assert grid_search.logger.name == "GridSearch (name)"
    assert "logger" not in grid_search.__getstate__()

    dumped = dill.dumps(grid_search)
    loaded = dill.loads(dumped)
    assert loaded.logger is not None
Example #4
def _make_grid_search(mapper, parent_search, session=None):
    search = af.SearchGridSearch(search=af.m.MockOptimizer(session=session),
                                 number_of_steps=2)
    search.fit(model=mapper,
               analysis=af.m.MockAnalysis(),
               grid_priors=[
                   mapper.component.one_tuple.one_tuple_0,
                   mapper.component.one_tuple.one_tuple_1,
               ],
               parent=parent_search)
    return search
Example #5
    def test_passes_attributes(self):
        grid_search = af.SearchGridSearch(number_of_steps=10,
                                          search=af.DynestyStatic())
        grid_search.paths = af.DirectoryPaths(name="")

        grid_search.nlive = 20
        grid_search.facc = 0.3

        search = grid_search.search_instance("name_path")

        assert search.nlive is grid_search.nlive
        assert grid_search.paths.path != search.paths.path
        assert grid_search.paths.output_path != search.paths.output_path
Example #6
`number_of_steps`: The number of steps in the grid search that are performed, which is set to 5 below.
 
Because the prior on the parameter `centre` is a `UniformPrior` from 0.0 -> 100.0, the first step of the grid search
sets the prior on the `centre` to a `UniformPrior` from 0.0 -> 20.0. The second step runs from 20.0 -> 40.0, the
third from 40.0 -> 60.0, and so on (a short sketch after the grid search is created below illustrates this
subdivision).
   
`parallel`: If `True`, each search in the grid is performed in parallel on your laptop. 

`number_of_cores`: The number of cores over which the grid search parallelizes the run. If `number_of_cores=1`, the
search runs in serial. For > 1 core, 1 core is reserved as a farmer, e.g., if `number_of_cores=4` then up to 3 
searches run in parallel. Because your laptop may have limited hardware resources, this example does not run in 
parallel by default, but feel free to enable it if you have plenty of CPUs and memory!
"""
grid_search = af.SearchGridSearch(search=dynesty,
                                  number_of_steps=5,
                                  number_of_cores=1)
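"""
To see how `number_of_steps` subdivides the prior, we can inspect each step's model via the grid search's
`model_mappers` method. This is a brief sketch, assuming the model contains the `gaussian_feature` component with a
`centre` parameter described below.
"""
# Each step's model has a `UniformPrior` on `centre` covering one fifth of the original 0.0 -> 100.0 range,
# e.g. 0.0 -> 20.0, 20.0 -> 40.0, and so on.
for step_model in grid_search.model_mappers(
    model, grid_priors=[model.gaussian_feature.centre]
):
    print(
        step_model.gaussian_feature.centre.lower_limit,
        step_model.gaussian_feature.centre.upper_limit,
    )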
"""
We can now run the grid search.

This is where we specify the parameter over which the grid search is performed, in this case the `centre` of the 
`gaussian_feature` in our model.

On my laptop, each model fit performed by the grid search takes ~15000 iterations, whereas the fit above
required ~40000 iterations. Thus, in this simple example, the grid search did not speed up the overall analysis 
(unless it is run in parallel). However, for more complex and realistic model-fitting problems, the grid search has
the potential to give huge performance improvements if used effectively.
"""
grid_search_result = grid_search.fit(
    model=model,
    analysis=analysis,
    grid_priors=[model.gaussian_feature.centre],
)
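"""
The fit returns a grid-search result object, which contains the result of every individual search as well as
native-shaped arrays of their figures of merit. This is a brief illustrative sketch; the attributes printed below
are the ones used in the tests above.
"""
print(grid_search_result.no_dimensions)
print(grid_search_result.log_likelihoods_native)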

Example #7
def make_grid_search_05():
    search = af.SearchGridSearch(
        search=af.m.MockOptimizer(), number_of_steps=2
    )
    search.search.paths = af.DirectoryPaths(name="sample_name")
    return search
Example #8
def make_grid_search(mapper):
    mock_search = af.m.MockSearch()
    mock_search.paths = af.DirectoryPaths(name="")
    search = af.SearchGridSearch(number_of_steps=10, search=mock_search)
    return search
Example #9
def make_search(session):
    return af.SearchGridSearch(search=af.m.MockOptimizer(session=session),
                               number_of_steps=2)
Example #10
def detection_single_plane(
    path_prefix: str,
    analysis: Union[al.AnalysisImaging, al.AnalysisInterferometer],
    setup_hyper: al.SetupHyper,
    mass_results: af.ResultsCollection,
    subhalo_mass: af.Model(al.mp.MassProfile) = af.Model(
        al.mp.SphNFWMCRLudlow),
    grid_dimension_arcsec: float = 3.0,
    number_of_steps: Union[Tuple[int], int] = 5,
    number_of_cores: int = 1,
    unique_tag: Optional[str] = None,
    session: Optional[bool] = None,
) -> af.ResultsCollection:
    """
    The SLaM SUBHALO PIPELINE for fitting imaging data with or without a lens light component, where it is assumed
    that the subhalo is at the same redshift as the lens galaxy.

    Parameters
    ----------
    path_prefix
        The prefix of folders between the output path and the search folders.
    analysis
        The analysis class which includes the `log_likelihood_function` and can be customized for the SLaM model-fit.
    setup_hyper
        The setup of the hyper analysis if used (e.g. hyper-galaxy noise scaling).
    mass_results
        The results of the SLaM MASS PIPELINE which ran before this pipeline.
    subhalo_mass
        The `MassProfile` used to fit the subhalo in this pipeline.
    grid_dimension_arcsec
        The arc-second dimensions of the grid in the y and x directions. An input value of 3.0" means the grid in
        all four directions extends to 3.0" giving it dimensions 6.0" x 6.0".
    number_of_steps
        The 2D dimensions of the grid (e.g. number_of_steps x number_of_steps) over which the subhalo grid search is performed.
    number_of_cores
        The number of cores used to perform the non-linear search grid search. If 1, each model-fit on the grid is
        performed in serial, if > 1 fits are distributed in parallel using the Python multiprocessing module.
    unique_tag
        The unique tag for this model-fit, which will be given a unique entry in the sqlite database and also acts as
        the folder after the path prefix and before the search name. This is typically the name of the dataset.
    """
    """
    __Model + Search + Analysis + Model-Fit (Search 1)__

    In search 1 of the SUBHALO PIPELINE we fit a lens model where:

     - The lens galaxy mass is modeled using MASS PIPELINE's mass distribution [Priors initialized from MASS PIPELINE].
     - The source galaxy's light is parametric or an inversion depending on the previous MASS PIPELINE [Model and 
     priors initialized from MASS PIPELINE].

    This search aims to accurately estimate the lens mass model, using the improved mass model priors and source model 
    of the MASS PIPELINE. This model will be used to perform Bayesian model comparison with models that include a 
    subhalo, to determine if a subhalo is detected.
    """

    source = slam_util.source__from_result_model_if_parametric(
        result=mass_results.last, setup_hyper=setup_hyper)

    model = af.Collection(
        galaxies=af.Collection(lens=mass_results.last.model.galaxies.lens,
                               source=source),
        hyper_image_sky=setup_hyper.hyper_image_sky_from_result(
            result=mass_results.last, as_model=True),
        hyper_background_noise=setup_hyper.hyper_background_noise_from_result(
            result=mass_results.last),
    )

    search = af.DynestyStatic(
        path_prefix=path_prefix,
        name="subhalo[1]_mass[total_refine]",
        unique_tag=unique_tag,
        session=session,
        nlive=100,
    )

    result_1 = search.fit(model=model, analysis=analysis)
    """
    __Model + Search + Analysis + Model-Fit (Search 2)__

    In search 2 of the SUBHALO PIPELINE we perform a [number_of_steps x number_of_steps] grid search of non-linear
    searches where:

     - The lens galaxy mass is modeled using MASS PIPELINE's mass distribution [Priors initialized from MASS PIPELINE].
     - The source galaxy's light is parametric or an inversion depending on the previous MASS PIPELINE [Model and 
     priors initialized from MASS PIPELINE].
     - The subhalo redshift is fixed to that of the lens galaxy.
     - Each grid search varies the subhalo (y,x) coordinates and mass as free parameters.
     - The priors on these (y,x) coordinates are UniformPriors, with limits corresponding to the grid-cells.

    This search aims to detect a dark matter subhalo.
    """

    subhalo = af.Model(al.Galaxy,
                       redshift=result_1.instance.galaxies.lens.redshift,
                       mass=subhalo_mass)

    subhalo.mass.mass_at_200 = af.LogUniformPrior(lower_limit=1.0e6,
                                                  upper_limit=1.0e11)
    subhalo.mass.centre_0 = af.UniformPrior(lower_limit=-grid_dimension_arcsec,
                                            upper_limit=grid_dimension_arcsec)
    subhalo.mass.centre_1 = af.UniformPrior(lower_limit=-grid_dimension_arcsec,
                                            upper_limit=grid_dimension_arcsec)

    subhalo.mass.redshift_object = result_1.instance.galaxies.lens.redshift
    subhalo.mass.redshift_source = result_1.instance.galaxies.source.redshift

    source = slam_util.source__from_result_model_if_parametric(
        result=mass_results.last, setup_hyper=setup_hyper)

    model = af.Collection(
        galaxies=af.Collection(lens=mass_results.last.model.galaxies.lens,
                               subhalo=subhalo,
                               source=source),
        hyper_image_sky=setup_hyper.hyper_image_sky_from_result(
            result=mass_results.last, as_model=True),
        hyper_background_noise=setup_hyper.hyper_background_noise_from_result(
            result=mass_results.last),
    )

    search = af.DynestyStatic(
        path_prefix=path_prefix,
        name="subhalo[2]_mass[total]_source_subhalo[search_lens_plane]",
        unique_tag=unique_tag,
        session=session,
        nlive=50,
        walks=5,
        facc=0.2,
    )

    subhalo_grid_search = af.SearchGridSearch(search=search,
                                              number_of_steps=number_of_steps,
                                              number_of_cores=number_of_cores)

    grid_search_result = subhalo_grid_search.fit(
        model=model,
        analysis=analysis,
        grid_priors=[
            model.galaxies.subhalo.mass.centre_0,
            model.galaxies.subhalo.mass.centre_1,
        ],
    )
    """
    __Model + Search + Analysis + Model-Fit (Search 3)__

    In search 3 of the SUBHALO PIPELINE we refit the lens and source models above but now including a subhalo, where 
    the subhalo model is initialized from the highest evidence model of the subhalo grid search.

     - The lens galaxy mass is modeled using MASS PIPELINE's mass distribution [Priors initialized from MASS PIPELINE].
     - The source galaxy's light is parametric or an inversion depending on the previous MASS PIPELINE [Model and 
     priors initialized from MASS PIPELINE].
     - The subhalo redshift is fixed to that of the lens galaxy.
     - The subhalo (y,x) coordinates and mass are free parameters, with priors initialized from the highest evidence 
     model of the grid search above.

    This search aims to refine the parameter estimates and errors of a dark matter subhalo detected in the grid search
    above.
    """

    subhalo = af.Model(al.Galaxy,
                       redshift=result_1.instance.galaxies.lens.redshift,
                       mass=subhalo_mass)

    subhalo.mass.mass_at_200 = (
        grid_search_result.model.galaxies.subhalo.mass.mass_at_200)
    subhalo.mass.centre = grid_search_result.model.galaxies.subhalo.mass.centre

    subhalo.mass.redshift_object = grid_search_result.instance.galaxies.lens.redshift
    subhalo.mass.redshift_source = grid_search_result.instance.galaxies.source.redshift

    model = af.Collection(
        galaxies=af.Collection(
            lens=grid_search_result.model.galaxies.lens,
            subhalo=subhalo,
            source=grid_search_result.model.galaxies.source,
        ),
        hyper_image_sky=grid_search_result.instance.hyper_image_sky,
        hyper_background_noise=grid_search_result.instance.hyper_background_noise,
    )

    search = af.DynestyStatic(
        name="subhalo[3]_subhalo[single_plane_refine]",
        unique_tag=unique_tag,
        session=session,
        path_prefix=path_prefix,
        nlive=100,
    )

    result_3 = search.fit(model=model, analysis=analysis)

    return af.ResultsCollection([result_1, grid_search_result, result_3])
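
# A minimal usage sketch (hypothetical): `analysis`, `setup_hyper` and `mass_results` would come from the
# preceding SLaM pipelines (e.g. the MASS PIPELINE referenced in the docstring); the names and values below
# are placeholders, not part of the original pipeline code.
subhalo_results = detection_single_plane(
    path_prefix="slam_example",
    analysis=analysis,
    setup_hyper=setup_hyper,
    mass_results=mass_results,
    grid_dimension_arcsec=3.0,
    number_of_steps=5,
    number_of_cores=1,
    unique_tag="example_dataset",
)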