Example #1
def make_factor_graph_model():
    model_factor_1 = g.AnalysisFactor(af.Collection(one=af.UniformPrior()),
                                      af.m.MockAnalysis())
    model_factor_2 = g.AnalysisFactor(af.Collection(one=af.UniformPrior()),
                                      af.m.MockAnalysis())

    return g.FactorGraphModel(model_factor_1, model_factor_2)
Example #2
def make_non_trivial_model():
    one = af.Model(af.Gaussian)
    two = af.Model(af.Gaussian)

    one.centre = two.centre

    model_factor_1 = g.AnalysisFactor(one, af.m.MockAnalysis())
    model_factor_2 = g.AnalysisFactor(two, af.m.MockAnalysis())

    return g.FactorGraphModel(model_factor_1, model_factor_2)
Example #3
def _test_gaussian():
    n_observations = 100
    x = np.arange(n_observations)
    y = make_data(Gaussian(centre=50.0, normalization=25.0, sigma=10.0), x)

    prior_model = af.PriorModel(
        Gaussian,
        # centre=af.GaussianPrior(mean=50, sigma=10),
        # normalization=af.GaussianPrior(mean=25, sigma=10),
        sigma=af.GaussianPrior(mean=10, sigma=10),
        centre=af.UniformPrior(lower_limit=30, upper_limit=70),
        normalization=af.UniformPrior(lower_limit=15, upper_limit=35),
        # sigma=af.UniformPrior(lower_limit=5, upper_limit=15),
    )

    factor_model = ep.AnalysisFactor(prior_model, analysis=Analysis(x=x, y=y))

    # optimiser = ep.LaplaceOptimiser(
    #     transform_cls=DiagonalMatrix
    # )
    optimiser = af.DynestyStatic()
    model = factor_model.optimise(optimiser)

    assert model.centre.mean == pytest.approx(50, rel=0.1)
    assert model.normalization.mean == pytest.approx(25, rel=0.1)
    assert model.sigma.mean == pytest.approx(10, rel=0.1)
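The `Analysis` class these tests rely on is not shown in this excerpt. A minimal sketch of the assumed shape, using a simple chi-squared likelihood (the callable-profile evaluation and the unit noise scale are assumptions, not the repository's exact implementation):

import numpy as np

import autofit as af


class Analysis(af.Analysis):
    def __init__(self, x, y):
        super().__init__()
        self.x = x
        self.y = y

    def log_likelihood_function(self, instance):
        # Evaluate the model profile on the x grid; this assumes the
        # `Gaussian` model component is callable on an array of x values.
        model_y = instance(self.x)
        # Chi-squared log likelihood with an assumed noise scale of 1.
        return -0.5 * float(np.sum((self.y - model_y) ** 2))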
Example #4
def make_factor_model(prior_model):
    class MockAnalysis(af.Analysis):
        @staticmethod
        def log_likelihood_function(*_):
            return 1

    # Use the MockAnalysis defined above (the class would otherwise be unused).
    return ep.AnalysisFactor(prior_model, analysis=MockAnalysis())
Example #5
def test_model_factor(data, centres):
    y = data[0]
    centre_argument = af.GaussianPrior(mean=50, sigma=20)
    prior_model = af.PriorModel(af.Gaussian,
                                centre=centre_argument,
                                normalization=20,
                                sigma=5)
    # `x` (the observation grid) is assumed to be defined at module scope.
    factor = g.AnalysisFactor(prior_model, analysis=Analysis(x=x, y=y))
    laplace = g.LaplaceOptimiser()

    gaussian = factor.optimise(laplace, max_steps=10)
    assert gaussian.centre.mean == pytest.approx(centres[0], abs=0.1)
Example #6
def test_full_fit(centre_model, data, centres):
    graph = g.FactorGraphModel()
    for i, y in enumerate(data):
        prior_model = af.PriorModel(
            af.Gaussian,
            centre=af.GaussianPrior(mean=100, sigma=20),
            normalization=20,
            sigma=5,
        )
        # `x` is again assumed to come from module scope.
        graph.add(g.AnalysisFactor(prior_model, analysis=Analysis(x=x, y=y)))
        centre_model.add_drawn_variable(prior_model.centre)

    graph.add(centre_model)

    optimiser = g.LaplaceOptimiser()

    collection = graph.optimise(optimiser, max_steps=10).model
Example #7
    def make_factor_model(centre: float,
                          sigma: float,
                          optimiser=None) -> ep.AnalysisFactor:
        # `x`, `normalization` and `normalization_prior` are closed over from
        # the enclosing scope; Example #12 is the documented version of this.
        y = make_data(
            Gaussian(centre=centre, normalization=normalization, sigma=sigma),
            x)

        prior_model = af.PriorModel(
            Gaussian,
            centre=af.UniformPrior(lower_limit=10, upper_limit=100),
            normalization=normalization_prior,
            sigma=af.UniformPrior(lower_limit=0, upper_limit=20),
        )

        return ep.AnalysisFactor(prior_model,
                                 analysis=Analysis(x=x, y=y),
                                 optimiser=optimiser)
Example #8
def test_trivial():
    prior = af.UniformPrior(lower_limit=10, upper_limit=20)

    prior_model = af.Collection(value=prior)

    class TrivialAnalysis(af.Analysis):
        def log_likelihood_function(self, instance):
            result = -((instance.value - 14)**2)
            return result

    factor_model = ep.AnalysisFactor(prior_model, analysis=TrivialAnalysis())

    optimiser = ep.LaplaceOptimiser()
    # optimiser = af.DynestyStatic()
    model = factor_model.optimise(optimiser)

    assert model.value.mean == pytest.approx(14, rel=0.1)
Example #9
def test_gaussian():
    n_observations = 100
    x = np.arange(n_observations)
    y = make_data(Gaussian(centre=50.0, normalization=25.0, sigma=10.0), x)

    prior_model = af.PriorModel(
        Gaussian,
        centre=af.GaussianPrior(mean=50, sigma=20),
        normalization=af.GaussianPrior(mean=25, sigma=10),
        sigma=af.GaussianPrior(mean=10, sigma=10),
    )

    factor_model = ep.AnalysisFactor(prior_model, analysis=Analysis(x=x, y=y))

    laplace = ep.LaplaceOptimiser()
    model = factor_model.optimise(laplace)

    assert model.centre.mean == pytest.approx(50, rel=0.1)
    assert model.normalization.mean == pytest.approx(25, rel=0.1)
    assert model.sigma.mean == pytest.approx(10, rel=0.1)
Example #10
def test_pickle():
    prior_model = af.Model(
        Gaussian
    )
    analysis_factor = ep.AnalysisFactor(
        prior_model,
        analysis=Analysis(
            x=1,
            y=2
        ),
    )

    analysis_factor = dill.loads(
        dill.dumps(analysis_factor)
    )

    assert isinstance(
        analysis_factor,
        ep.AnalysisFactor
    )
Example #11
def test_optimise(model_gaussian_x1, prior):
    optimizer = af.DynestyStatic(
        maxcall=10
    )
    analysis = af.m.MockAnalysis()
    factor = g.AnalysisFactor(
        model_gaussian_x1,
        analysis
    )
    prior_factor = factor.prior_factors[0]
    result, status = optimizer.optimise(
        prior_factor,
        factor.mean_field_approximation()
    )

    assert status

    optimized_mean = list(result.mean_field.values())[0].mean
    assert optimized_mean == pytest.approx(
        prior.mean,
        rel=0.1
    )
Example #12
    def make_factor_model(
            centre: float, sigma: float, optimiser=None
    ) -> ep.AnalysisFactor:
        """
        We'll make a LikelihoodModel for each Gaussian we're fitting.

        First we'll make the actual data to be fit.

        Note that the normalization value is shared.
        """
        y = make_data(
            Gaussian(centre=centre, normalization=normalization, sigma=sigma), x
        )

        """
        Next we need a prior model.
    
        Note that the normalization prior is shared.
        """
        prior_model = af.PriorModel(
            Gaussian,
            centre=af.GaussianPrior(mean=50, sigma=20),
            normalization=normalization_prior,
            sigma=af.GaussianPrior(mean=10, sigma=10),
        )

        """
        Finally we combine the likelihood function with the prior model to produce a likelihood
        factor - this will be converted into a ModelFactor which is like any other factor in the
        factor graph.
        
        We can also pass a custom optimiser in here that will be used to fit the factor instead
        of the default optimiser.
        """
        return ep.AnalysisFactor(
            prior_model, analysis=Analysis(x=x, y=y), optimiser=optimiser
        )
Example #13
def test_visualize():
    analysis = Analysis()

    gaussian = af.Model(af.Gaussian)

    analysis_factor = g.AnalysisFactor(
        prior_model=gaussian,
        analysis=analysis
    )

    factor_graph = g.FactorGraphModel(
        analysis_factor
    )

    model = factor_graph.global_prior_model
    instance = model.instance_from_prior_medians()

    factor_graph.visualize(
        af.DirectoryPaths(),
        instance,
        False  # during_analysis (parameter name assumed)
    )

    assert analysis.did_call_visualise is True
Example #14
def make_model_factor_1():
    return g.AnalysisFactor(af.Model(af.Gaussian), af.m.MockAnalysis())
Example #15
def make_factor_model(prior_model, analysis):
    return g.AnalysisFactor(prior_model, analysis=analysis)
The point where a `Model` and `Analysis` class meet is called an `AnalysisFactor`. 

This term is used to denote that we are composing a graphical model, which is commonly termed a 'factor graph'. A 
factor defines a node on this graph where we have some data, a model, and we fit the two together. The 'links' between 
these different nodes then define the global model we are fitting.

Each `AnalysisFactor` is also assigned its own `search`. This is because the graphical modeling framework performs a 
model-fit to each node on the factor graph (e.g. each `AnalysisFactor`) individually. Therefore, each node requires its 
own non-linear search, for which we use `DynestyStatic` as per usual. For complex graphs consisting of many nodes, one 
could easily use different searches for different nodes on the factor graph.
"""
analysis_factor_list = []

for model, analysis in zip(model_list, analysis_list):

    analysis_factor = g.AnalysisFactor(prior_model=model, analysis=analysis)

    analysis_factor_list.append(analysis_factor)
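The loop above leaves each factor with its default search; as the preceding text notes, every `AnalysisFactor` can carry its own. A sketch of the explicit form, mirroring Example #17 below (the `nlive` value is illustrative):

dynesty = af.DynestyStatic(nlive=100)

analysis_factor_list = [
    g.AnalysisFactor(prior_model=model, analysis=analysis, search=dynesty)
    for model, analysis in zip(model_list, analysis_list)
]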
"""
__Factor Graph__

We combine our `AnalysisFactors` into one, to compose the factor graph.

So, what is a factor graph?

A factor graph defines the graphical model we have composed. For example, it defines the different model components 
that make up our model (e.g. the three `Gaussian` classes) and how their parameters are linked or shared (e.g. that
each `Gaussian` has its own unique `normalization` and `centre`, but a shared `sigma` parameter).

This is what our factor graph looks like: 
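The rendered graph is not reproduced in this excerpt. A minimal sketch of composing it from the factors above and inspecting the global model it implies (printing `info` on the global prior model is an assumption about the inspection step):

factor_graph = g.FactorGraphModel(*analysis_factor_list)

# The global prior model gathers every component, exposing shared parameters once.
print(factor_graph.global_prior_model.info)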
Example #17
For complex graphs consisting of many nodes, one could easily use different searches for different nodes on the factor 
graph.
"""
dynesty = af.DynestyStatic(
    path_prefix=path.join("imaging", "graphical"),
    name="slope",
    nlive=100,
    sample="rwalk",
)

analysis_factor_list = []

for model, analysis in zip(model_list, analysis_list):

    analysis_factor = g.AnalysisFactor(prior_model=model,
                                       analysis=analysis,
                                       search=dynesty)

    analysis_factor_list.append(analysis_factor)
"""
We again combine our `AnalysisFactors` into one, to compose the factor graph.
"""
factor_graph = g.FactorGraphModel(*analysis_factor_list)
"""
__Expectation Propagation__

We now fit the `factor_graph` via **PyAutoLens** and the expectation propagation (EP) framework. This fits the 
graphical model composed in this tutorial as follows:

1) Go to the first node on the factor graph (e.g. `analysis_factor_list[0]`) and fit its model to its dataset. This is 
simply a fit of the first lens model to the first imaging dataset, the type of model-fit we are used to performing.
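A sketch of launching the EP fit, following the `optimise` pattern used in Example #6 (the `max_steps` value is illustrative):

laplace = g.LaplaceOptimiser()

# As in Example #6, the result's `.model` attribute holds the fitted model.
model = factor_graph.optimise(laplace, max_steps=10).model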
Example #18
def make_model_factor_2():
    model_2 = af.Collection(one=af.UniformPrior())

    return g.AnalysisFactor(model_2, Analysis(0.0))
The hierarchical model fit uses EP, so we again supply each `AnalysisFactor` with its own `optimiser` and `name`.
"""
dynesty = af.DynestyStatic(nlive=100, sample="rwalk")

analysis_factor_list = []

for dataset_index, (model, analysis) in enumerate(zip(model_list, analysis_list)):

    dataset_name = f"dataset_{dataset_index}"

    analysis_factor = g.AnalysisFactor(prior_model=model,
                                       analysis=analysis,
                                       optimiser=dynesty,
                                       name=dataset_name)

    analysis_factor_list.append(analysis_factor)
"""
__Model__

We now compose the hierarchical model that we fit, using the individual Gaussian model components we created above.

We first create a `HierarchicalFactor`, which represents the parent Gaussian distribution from which we will assume 
that the `centre` of each individual `Gaussian` dataset is drawn. 

For this parent `Gaussian`, we place priors on its `mean` and `sigma`, since they are parameters of the model we are 
ultimately fitting.
"""
Example #20
datasets which we intend to fit with each of these `Gaussians`, setting up each in an `Analysis` class that defines 
how the model is used to fit the data.

We now simply need to pair each model-component to each `Analysis` class, so that **PyAutoFit** knows that: 

- `prior_model_0` fits `data_0` via `analysis_0`.
- `prior_model_1` fits `data_1` via `analysis_1`.
- `prior_model_2` fits `data_2` via `analysis_2`.

The point where a `Model` and `Analysis` class meet is called an `AnalysisFactor`. 

This term is used to denote that we are composing a graphical model, which is commonly termed a 'factor graph'. A 
factor defines a node on this graph where we have some data, a model, and we fit the two together. The 'links' between 
these different nodes then define the global model we are fitting.
"""
analysis_factor_0 = g.AnalysisFactor(prior_model=prior_model_0,
                                     analysis=analysis_0)
analysis_factor_1 = g.AnalysisFactor(prior_model=prior_model_1,
                                     analysis=analysis_1)
analysis_factor_2 = g.AnalysisFactor(prior_model=prior_model_2,
                                     analysis=analysis_2)
"""
We combine our `AnalysisFactors` into one, to compose the factor graph.
"""
factor_graph = g.FactorGraphModel(analysis_factor_0, analysis_factor_1,
                                  analysis_factor_2)
"""
So, what is a factor graph?

A factor graph defines the graphical model we have composed. For example, it defines the different model components 
that make up our model (e.g. the three `Gaussian` classes) and how their parameters are linked or shared (e.g. that
each `Gaussian` has its own unique `normalization` and `sigma`, but a shared `centre` parameter).
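The shared `centre` described here is set up when the prior models are created, by assigning one prior object to all three components, in the same spirit as Example #2's `one.centre = two.centre` (the prior values below are illustrative):

centre_shared_prior = af.GaussianPrior(mean=50.0, sigma=30.0)

# Assigning the same prior object ties the parameter across all three models.
prior_model_0.centre = centre_shared_prior
prior_model_1.centre = centre_shared_prior
prior_model_2.centre = centre_shared_prior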
Example #21
def make_analysis_factor():
    return g.AnalysisFactor(prior_model=af.PriorModel(af.Gaussian),
                            analysis=af.m.MockAnalysis(),
                            name="AnalysisFactor0")