Example No. 1
    def test__samples_from_model(self):
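        # Build a PySwarmsGlobal search with a fixed output path and identifier, then check that the samples
        # it returns for a simple 3-parameter model have the expected types and values.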
        pyswarms = af.PySwarmsGlobal()
        pyswarms.paths = af.DirectoryPaths(
            path_prefix=path.join("non_linear", "pyswarms"))
        pyswarms.paths._identifier = "tag"

        model = af.ModelMapper(mock_class=af.m.MockClassx3)
        model.mock_class.one = af.LogUniformPrior(lower_limit=1e-8,
                                                  upper_limit=100.0)
        model.mock_class.two = af.LogUniformPrior(lower_limit=1e-8,
                                                  upper_limit=100.0)
        model.mock_class.three = af.LogUniformPrior(lower_limit=1e-8,
                                                    upper_limit=100.0)
        # model.mock_class.four = af.LogUniformPrior(lower_limit=1e-8, upper_limit=100.0)

        samples = pyswarms.samples_from(model=model)

        assert isinstance(samples.parameter_lists, list)
        assert isinstance(samples.parameter_lists[0], list)
        assert isinstance(samples.log_likelihood_list, list)
        assert isinstance(samples.log_prior_list, list)
        assert isinstance(samples.log_posterior_list, list)

        assert samples.parameter_lists[0] == pytest.approx(
            [50.1254, 1.04626, 10.09456], 1.0e-4)

        assert samples.log_likelihood_list[0] == pytest.approx(
            -5071.80777, 1.0e-4)
        assert samples.log_posterior_list[0] == pytest.approx(
            -5070.73298, 1.0e-4)
        assert samples.weight_list[0] == 1.0

        assert len(samples.parameter_lists) == 500
        assert len(samples.log_likelihood_list) == 500
Example No. 2
analysis = a.Analysis(data=data, noise_map=noise_map)
"""
__Search__

We now create and run the `PySwarmsGlobal` object which acts as our non-linear search. 

We manually specify all of the PySwarms settings, descriptions of which are provided at the following webpage:

 https://pyswarms.readthedocs.io/en/latest/api/pyswarms.single.html#module-pyswarms.single.global_best
"""
pso = af.PySwarmsGlobal(
    path_prefix="searches",
    name="PySwarmsGlobal",
    n_particles=50,
    iters=1000,
    cognitive=0.5,
    social=0.3,
    inertia=0.9,
    ftol=-np.inf,
    iterations_per_update=1000,
    number_of_cores=1,
)

result = pso.fit(model=model, analysis=analysis)
"""
__Result__

The result object returned by the fit provides information on the results of the non-linear search. Let's use it to
compare the maximum log likelihood `Gaussian` to the data.
"""
model_data = result.max_log_likelihood_instance.profile_1d_via_xvalues_from(
    xvalues=np.arange(data.shape[0]))
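"""
As an illustrative sketch (matplotlib is assumed to be available; it is not imported in this excerpt), the
maximum log likelihood model can be plotted over the data to inspect the quality of the fit:
"""
import matplotlib.pyplot as plt

xvalues = np.arange(data.shape[0])

# Plot the data with the noise-map as error bars and overlay the maximum log likelihood model.
plt.errorbar(xvalues, data, yerr=noise_map, color="k", ecolor="k", elinewidth=1, capsize=2)
plt.plot(xvalues, model_data, color="r")
plt.title("PySwarmsGlobal fit to the 1D Gaussian dataset")
plt.xlabel("x values of profile")
plt.ylabel("Profile normalization")
plt.show()
plt.close()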
Example No. 3
We create the Analysis as per usual.
"""
analysis = al.AnalysisImaging(dataset=imaging)
"""
__Search__

Below we use `PySwarmsGlobal` to fit the lens model, using the model composed above to set where the particles start.
See the PySwarms docs for a description of what the input parameters below do and what the `Global` search technique is.
"""
search = af.PySwarmsGlobal(
    path_prefix=path.join("imaging", "searches"),
    name="PySwarmsGlobal",
    unique_tag=dataset_name,
    n_particles=30,
    iters=300,
    cognitive=0.5,
    social=0.3,
    inertia=0.9,
    ftol=-np.inf,
    iterations_per_update=1000,
    number_of_cores=1,
)

result = search.fit(model=model, analysis=analysis)
"""
__Result__

We can use a `PySwarmsPlotter` to visualize the results of the search, using the in-built plotting tools of
PySwarms (for example, the cost history and trajectories of the particle swarm).
"""
pyswarms_plotter = aplt.PySwarmsPlotter(samples=result.samples)
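"""
As a hedged sketch of how the plotter may then be used (the exact method names are an assumption here and should
be checked against the PySwarmsPlotter API docs), the in-built PySwarms figures can be output as follows:
"""
# Plot the cost history of the swarm over all iterations (method name assumed).
pyswarms_plotter.cost_history()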
Example No. 4
    def test__loads_from_config_file_correct(self):
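        # Settings passed to the constructor should override the defaults in the config files.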
        pso = af.PySwarmsGlobal(
            prior_passer=af.PriorPasser(sigma=2.0,
                                        use_errors=False,
                                        use_widths=False),
            n_particles=51,
            iters=2001,
            cognitive=0.4,
            social=0.5,
            inertia=0.6,
            initializer=af.InitializerBall(lower_limit=0.2, upper_limit=0.8),
            iterations_per_update=10,
            number_of_cores=2,
        )

        assert pso.prior_passer.sigma == 2.0
        assert pso.prior_passer.use_errors is False
        assert pso.prior_passer.use_widths is False
        assert pso.config_dict_search["n_particles"] == 51
        assert pso.config_dict_search["cognitive"] == 0.4
        assert pso.config_dict_run["iters"] == 2001
        assert isinstance(pso.initializer, af.InitializerBall)
        assert pso.initializer.lower_limit == 0.2
        assert pso.initializer.upper_limit == 0.8
        assert pso.iterations_per_update == 10
        assert pso.number_of_cores == 2

        pso = af.PySwarmsGlobal()
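        # With no arguments given, all settings should fall back to the config-file defaults checked below.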

        assert pso.prior_passer.sigma == 3.0
        assert pso.prior_passer.use_errors is True
        assert pso.prior_passer.use_widths is True
        assert pso.config_dict_search["n_particles"] == 50
        assert pso.config_dict_search["cognitive"] == 0.1
        assert pso.config_dict_run["iters"] == 2000
        assert isinstance(pso.initializer, af.InitializerPrior)
        assert pso.iterations_per_update == 11
        assert pso.number_of_cores == 1

        pso = af.PySwarmsLocal(
            prior_passer=af.PriorPasser(sigma=2.0,
                                        use_errors=False,
                                        use_widths=False),
            n_particles=51,
            iters=2001,
            cognitive=0.4,
            social=0.5,
            inertia=0.6,
            number_of_k_neighbors=4,
            minkowski_p_norm=1,
            initializer=af.InitializerBall(lower_limit=0.2, upper_limit=0.8),
            iterations_per_update=10,
            number_of_cores=2,
        )

        assert pso.prior_passer.sigma == 2.0
        assert pso.prior_passer.use_errors is False
        assert pso.prior_passer.use_widths is False
        assert pso.config_dict_search["n_particles"] == 51
        assert pso.config_dict_search["cognitive"] == 0.4
        assert pso.config_dict_run["iters"] == 2001
        assert isinstance(pso.initializer, af.InitializerBall)
        assert pso.initializer.lower_limit == 0.2
        assert pso.initializer.upper_limit == 0.8
        assert pso.iterations_per_update == 10
        assert pso.number_of_cores == 2

        pso = af.PySwarmsLocal()

        assert pso.prior_passer.sigma == 3.0
        assert pso.prior_passer.use_errors is True
        assert pso.prior_passer.use_widths is True
        assert pso.config_dict_search["n_particles"] == 50
        assert pso.config_dict_search["cognitive"] == 0.1
        assert pso.config_dict_run["iters"] == 2000
        assert isinstance(pso.initializer, af.InitializerPrior)
        assert pso.iterations_per_update == 11
        assert pso.number_of_cores == 1
Example No. 5
dataset_path = path.join("dataset", "example_1d", "gaussian_x1")
data = af.util.numpy_array_from_json(
    file_path=path.join(dataset_path, "data.json"))
noise_map = af.util.numpy_array_from_json(
    file_path=path.join(dataset_path, "noise_map.json"))

model = af.Model(m.Gaussian)

model.centre = af.UniformPrior(lower_limit=0.0, upper_limit=100.0)
model.normalization = af.UniformPrior(lower_limit=1e-2, upper_limit=1e2)
model.sigma = af.UniformPrior(lower_limit=0.0, upper_limit=30.0)

analysis = a.Analysis(data=data, noise_map=noise_map)

pyswarms = af.PySwarmsGlobal(path_prefix=path.join("plot"),
                             name="PySwarmsPlotter",
                             n_particles=50,
                             iters=10)

result = pyswarms.fit(model=model, analysis=analysis)

samples = result.samples
"""
We now pass the samples to a `PySwarmsPlotter`, which allows us to use the in-built plotting libraries of PySwarms
to make figures.

The PySwarms readthedocs fully describes all of the methods used below:

 - https://pyswarms.readthedocs.io/en/latest/api/pyswarms.utils.plotters.html

In all the examples below, we use the `kwargs` of this function to pass in any of the input parameters that are
described in the API docs.
"""
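"""
As a hedged sketch of this (assuming `autofit.plot` is imported as `aplt`, which is not shown in this excerpt, and
treating the exact method names as an assumption to be checked against the API docs linked above):
"""
pyswarms_plotter = aplt.PySwarmsPlotter(samples=samples)

# For example, plot the cost history of the swarm over all iterations.
pyswarms_plotter.cost_history()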
Example No. 6
we'll use the Particle Swarm Optimization algorithm PySwarms. For a full description of PySwarms, check out its GitHub
and readthedocs webpages:

https://github.com/ljvmiranda921/pyswarms
https://pyswarms.readthedocs.io/en/latest/index.html

**PyAutoFit** extends *PySwarms* by allowing runs to be terminated and resumed from the point of termination, as well
as providing different options for the initial distribution of particles.

"""
pso = af.PySwarmsGlobal(
    path_prefix=path.join("overview", "simple"),
    name="PySwarmsGlobal",
    n_particles=50,
    iters=100,
    cognitive=0.5,
    social=0.3,
    inertia=0.9,
    ftol=-np.inf,
    initializer=af.InitializerPrior(),
    number_of_cores=1,
)
result = pso.fit(model=model, analysis=analysis)
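
"""
As an illustration of the initializer options mentioned above, the search could instead be created with an
`InitializerBall`, which confines the particles' starting points to a narrow region of unit parameter space
(the name and limits below are placeholder values for this sketch):
"""
pso_ball = af.PySwarmsGlobal(
    path_prefix=path.join("overview", "simple"),
    name="PySwarmsGlobal_ball",
    n_particles=50,
    iters=100,
    initializer=af.InitializerBall(lower_limit=0.49, upper_limit=0.51),
    number_of_cores=1,
)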

"""
__Result__

The result object returned by PSO is again very similar in structure to previous results.
"""
model_data = result.max_log_likelihood_instance.profile_1d_via_xvalues_from(
    xvalues=np.arange(data.shape[0])
)
Example No. 7
https://github.com/ljvmiranda921/pyswarms
https://pyswarms.readthedocs.io/en/latest/index.html

**PyAutoFit** extends *PySwarms* by allowing runs to be terminated and resumed from the point of termination, as well
as providing different options for the initial distribution of particles.

"""

# %%
pso = af.PySwarmsGlobal(
    n_particles=50,
    iters=100,
    cognitive=0.5,
    social=0.3,
    inertia=0.9,
    ftol=-np.inf,
    initializer=af.InitializerPrior(),
    number_of_cores=1,
    paths=af.Paths(folders=["examples", "simple"]),
)
result = pso.fit(model=model, analysis=analysis)

# %%
"""
__Result__

The result object returned by PSO is again very similar in structure to previous results.
"""

# %%
# print(f"Working Directory has been set to `{workspace_path}`")

from os import path

import autofit as af
import autolens as al
import autolens.plot as aplt
"""
First, let's create a result via PySwarms by repeating the simple model-fit that is performed in
the `modeling/mass_total__source_parametric.py` example.
"""
dataset_name = "mass_sie__source_sersic"

search = af.PySwarmsGlobal(
    path_prefix=path.join("plot", "PySwarmsPlotter"),
    name="PySwarms",
    n_particles=50,
    iters=10,
)

dataset_path = path.join("dataset", "imaging", "no_lens_light", dataset_name)

imaging = al.Imaging.from_fits(
    image_path=path.join(dataset_path, "image.fits"),
    psf_path=path.join(dataset_path, "psf.fits"),
    noise_map_path=path.join(dataset_path, "noise_map.fits"),
    pixel_scales=0.1,
)

mask = al.Mask2D.circular(shape_native=imaging.shape_native,
                          pixel_scales=imaging.pixel_scales,
                          radius=3.0)