Example #1
    def save_attributes_for_aggregator(self, paths: af.DirectoryPaths):
        """
        Before the non-linear search begins, this routine saves attributes of the `Analysis` object to the `pickles`
        folder such that they can be loaded after the analysis using PyAutoFit's database and aggregator tools.

        For this analysis, it uses the `AnalysisDataset` object's method to output the following:

        - The dataset's data.
        - The dataset's noise-map.
        - The settings associated with the dataset.
        - The Cosmology.
        - Its mask.

        It is common for these attributes to be loaded by many of the template aggregator functions given in the
        `aggregator` modules. For example, when using the database tools to perform a fit, the default behaviour is for
        the dataset, settings and other attributes necessary to perform the fit to be loaded via the pickle files
        output by this function.

        Parameters
        ----------
        paths
            The PyAutoFit paths object which manages all paths, e.g. where the non-linear search outputs are stored,
            visualization, and the pickled objects used by the aggregator output by this function.
        """
        paths.save_object("data", self.dataset.data)
        paths.save_object("noise_map", self.dataset.noise_map)
        paths.save_object("settings_dataset", self.dataset.settings)
        paths.save_object("mask", self.dataset.mask)
        paths.save_object("cosmology", self.cosmology)
Example #2
    def save_stochastic_outputs(
        self, paths: af.DirectoryPaths, samples: af.OptimizerSamples
    ):
        """
        Output the stochastic log evidences of the maximum log likelihood model to a .json file and a pickled object,
        and visualize their distribution as a histogram. If the .json file already exists from a previous run, its
        values are reused instead of being recomputed.
        """
        stochastic_log_evidences_json_file = path.join(
            paths.output_path, "stochastic_log_evidences.json"
        )

        try:
            with open(stochastic_log_evidences_json_file, "r") as f:
                stochastic_log_evidences = np.asarray(json.load(f))
        except FileNotFoundError:
            instance = samples.max_log_likelihood_instance
            stochastic_log_evidences = self.stochastic_log_evidences_for_instance(
                instance=instance)

        if stochastic_log_evidences is None:
            return

        with open(stochastic_log_evidences_json_file, "w") as outfile:
            json.dump(
                [float(evidence) for evidence in stochastic_log_evidences],
                outfile)

        paths.save_object("stochastic_log_evidences", stochastic_log_evidences)

        visualizer = vis.Visualizer(visualize_path=paths.image_path)

        visualizer.visualize_stochastic_histogram(
            log_evidences=stochastic_log_evidences,
            max_log_evidence=np.max(samples.log_likelihood_list),
            histogram_bins=self.settings_lens.stochastic_histogram_bins,
        )
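The try/except block above acts as a simple cache: if a previous run already wrote stochastic_log_evidences.json, its values are reused rather than recomputed. A standalone sketch of that cache-or-compute pattern using only the standard library is shown below; the helper name and signature are illustrative and not part of PyAutoFit.

    import json
    from os import path

    def load_or_compute(json_file, compute_fn):
        # Reuse cached values from a previous run if the .json file exists.
        if path.exists(json_file):
            with open(json_file, "r") as f:
                return json.load(f)
        # Otherwise compute the values, cache them to disk and return them.
        values = compute_fn()
        if values is None:
            return None
        with open(json_file, "w") as f:
            json.dump([float(value) for value in values], f)
        return values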
Example #3
    def save_attributes_for_aggregator(self, paths: af.DirectoryPaths):
        """
        Before the model-fit begins, this routine saves attributes of the `Analysis` object to the `pickles` folder
        such that they can be loaded after the analysis using PyAutoFit's database and aggregator tools.

        For this analysis, it uses the `AnalysisDataset` object's method to output the following:

        - The dataset's data.
        - The dataset's noise-map.
        - The settings associated with the dataset.
        - The settings associated with the inversion.
        - The settings associated with the pixelization.
        - The Cosmology.
        - The hyper dataset's model image and galaxy images, if used.

        This function also outputs attributes specific to an interferometer dataset:

        - Its uv-wavelengths.
        - Its real space mask.

        It is common for these attributes to be loaded by many of the template aggregator functions given in the
        `aggregator` modules. For example, when using the database tools to perform a fit, the default behaviour is for
        the dataset, settings and other attributes necessary to perform the fit to be loaded via the pickle files
        output by this function.

        Parameters
        ----------
        paths
            The PyAutoFit paths object which manages all paths, e.g. where the non-linear search outputs are stored,
            visualization, and the pickled objects used by the aggregator output by this function.
        """
        super().save_attributes_for_aggregator(paths=paths)

        paths.save_object("uv_wavelengths", self.dataset.uv_wavelengths)
        paths.save_object("real_space_mask", self.dataset.real_space_mask)
Example #4
    def save_stochastic_outputs(self, paths: af.DirectoryPaths, samples: af.Samples):
        """
        Certain `Inversion`'s have stochasticity in their log likelihood estimate (e.g. due to how different KMeans
        seeds change the pixelization constructed by a `VoronoiBrightnessImage` pixelization).

        This function computes the stochastic log likelihoods of such a model, which are the log likelihoods computed
        using the same model but with different KMeans seeds.

        It outputs these stochastic likelihoods to a format which can be loaded via PyAutoFit's database tools, and
        may also be loaded if this analysis is extended with a stochastic model-fit that applies a log likelihood cap.

        This function also outputs visualization showing a histogram of the stochastic likelihood distribution.

        Parameters
        ----------
        paths
            The PyAutoFit paths object which manages all paths, e.g. where the non-linear search outputs are stored,
            visualization and the pickled objects used by the aggregator output by this function.
        samples
            A PyAutoFit object which contains the samples of the non-linear search, for example the chains of an MCMC
            run or the samples of a nested sampler.
        """
        stochastic_log_likelihoods_json_file = path.join(
            paths.output_path, "stochastic_log_likelihoods.json"
        )

        try:
            with open(stochastic_log_likelihoods_json_file, "r") as f:
                stochastic_log_likelihoods = np.asarray(json.load(f))
        except FileNotFoundError:
            instance = samples.max_log_likelihood_instance
            stochastic_log_likelihoods = self.stochastic_log_likelihoods_via_instance_from(
                instance=instance
            )

        if stochastic_log_likelihoods is None:
            return

        with open(stochastic_log_likelihoods_json_file, "w") as outfile:
            json.dump(
                [float(likelihood) for likelihood in stochastic_log_likelihoods], outfile
            )

        paths.save_object("stochastic_log_likelihoods", stochastic_log_likelihoods)

        visualizer = Visualizer(visualize_path=paths.image_path)

        visualizer.visualize_stochastic_histogram(
            stochastic_log_likelihoods=stochastic_log_likelihoods,
            max_log_evidence=np.max(samples.log_likelihood_list),
            histogram_bins=self.settings_lens.stochastic_histogram_bins,
        )
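The docstring above notes that the cached stochastic log likelihoods may later be used to apply a log likelihood cap. A hedged sketch of deriving such a cap from the cached .json file follows; the file path is hypothetical and using the median as the cap statistic is an assumption made for illustration, since the statistic actually used depends on the stochastic fit settings.

    import json

    import numpy as np

    # Hypothetical path to the .json file written by save_stochastic_outputs above.
    json_file = "output/example_fit/stochastic_log_likelihoods.json"

    with open(json_file, "r") as f:
        stochastic_log_likelihoods = np.asarray(json.load(f))

    # Assumed choice of cap: the median of the stochastic log likelihood distribution.
    log_likelihood_cap = float(np.median(stochastic_log_likelihoods))
    print(f"log likelihood cap = {log_likelihood_cap:.2f}")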
Example #5
    def save_attributes_for_aggregator(self, paths: af.DirectoryPaths):

        super().save_attributes_for_aggregator(paths=paths)

        paths.save_object("uv_wavelengths", self.dataset.uv_wavelengths)
        paths.save_object("real_space_mask", self.dataset.real_space_mask)
        paths.save_object("positions", self.positions)
Example #6
    def save_attributes_for_aggregator(self, paths: af.DirectoryPaths):

        super().save_attributes_for_aggregator(paths=paths)

        paths.save_object("psf", self.dataset.psf_unormalized)
        paths.save_object("mask", self.dataset.mask)
        paths.save_object("positions", self.positions)
Example #7
    def save_results_for_aggregator(
        self, paths: af.DirectoryPaths, result: ResultDataset
    ):
        """
        At the end of a model-fit, this routine saves attributes of the `Analysis` object to the `pickles`
        folder such that they can be loaded after the analysis using PyAutoFit's database and aggregator tools.

        For this analysis it outputs the following:

        - The stochastic log likelihoods of a pixelization, provided the pixelization has functionality that can
        compute likelihoods for different KMeans seeds and grids (e.g. `VoronoiBrightnessImage`).

        Parameters
        ----------
        paths
            The PyAutoFit paths object which manages all paths, e.g. where the non-linear search outputs are stored,
            visualization and the pickled objects used by the aggregator output by this function.
        result
            The result of a lens model fit, including the non-linear search, samples and maximum likelihood tracer.
        """
        pixelization = ag.util.model.pixelization_from(model=result.model)

        if pixelization is not None:

            tracer = result.max_log_likelihood_tracer

            sparse_image_plane_grid_pg_list = tracer.to_inversion.sparse_image_plane_grid_pg_list_from(
                grid=self.dataset.grid_pixelized
            )

            paths.save_object(
                "preload_sparse_grids_of_planes", sparse_image_plane_grid_pg_list
            )

        if conf.instance["general"]["hyper"]["stochastic_outputs"]:
            if pixelization is not None:
                if pixelization.is_stochastic:
                    self.save_stochastic_outputs(paths=paths, samples=result.samples)
Example #8
    def save_attributes_for_aggregator(self, paths: af.DirectoryPaths):
        paths.save_object("dataset", self.dataset)
        paths.save_object("mask", self.dataset.mask)

        self.save_settings(paths=paths)

        attributes = self.make_attributes()

        paths.save_object("attributes", attributes)
Example #9
    def save_attributes_for_aggregator(self, paths: af.DirectoryPaths):
        """
        Before the non-linear search begins, this routine saves attributes of the `Analysis` object to the `pickles`
        folder such that they can be loaded after the analysis using PyAutoFit's database and aggregator tools.

        For this analysis, it uses the `AnalysisDataset` object's method to output the following:

        - The dataset's data.
        - The dataset's noise-map.
        - The settings associated with the dataset.
        - The settings associated with the inversion.
        - The settings associated with the pixelization.
        - The Cosmology.
        - The hyper dataset's model image and galaxy images, if used.

        This function also outputs attributes specific to an imaging dataset:

        - Its PSF.
        - Its mask.
        - The positions of the brightest pixels in the lensed source, which are used to discard mass models.
        - The preloaded image-plane source-plane pixelization, if used by the analysis. This ensures that differences
          in the scikit-learn library do not lead to different pixelizations being computed if results are transferred
          from a HPC to a laptop.

        It is common for these attributes to be loaded by many of the template aggregator functions given in the
        `aggregator` modules. For example, when using the database tools to perform a fit, the default behaviour is for
        the dataset, settings and other attributes necessary to perform the fit to be loaded via the pickle files
        output by this function.

        Parameters
        ----------
        paths
            The PyAutoFit paths object which manages all paths, e.g. where the non-linear search outputs are stored,
            visualization, and the pickled objects used by the aggregator output by this function.
        """
        super().save_attributes_for_aggregator(paths=paths)

        paths.save_object("psf", self.dataset.psf_unormalized)
        paths.save_object("mask", self.dataset.mask)
        paths.save_object("positions", self.positions)
        if self.preloads.sparse_image_plane_grid_pg_list is not None:
            paths.save_object(
                "preload_sparse_grids_of_planes",
                self.preloads.sparse_image_plane_grid_pg_list,
            )
Example #10
    def save_attributes_for_aggregator(self, paths: af.DirectoryPaths):

        paths.save_object("dataset", self.point_dict)
Example #11
    def save_settings(self, paths: af.DirectoryPaths):
        paths.save_object("settings_dataset", self.dataset.settings)
        paths.save_object("settings_inversion", self.settings_inversion)
        paths.save_object("settings_pixelization", self.settings_pixelization)
Example #12
    def save_attributes_for_aggregator(self, paths: af.DirectoryPaths):

        paths.save_object("data", self.dataset.data)
        paths.save_object("noise_map", self.dataset.noise_map)
        paths.save_object("settings_dataset", self.dataset.settings)
        paths.save_object("settings_inversion", self.settings_inversion)
        paths.save_object("settings_pixelization", self.settings_pixelization)

        paths.save_object("cosmology", self.cosmology)

        if self.hyper_model_image is not None:
            paths.save_object("hyper_model_image", self.hyper_model_image)

        if self.hyper_galaxy_image_path_dict is not None:
            paths.save_object(
                "hyper_galaxy_image_path_dict", self.hyper_galaxy_image_path_dict
            )
Example #13
    def save_attributes_for_aggregator(self, paths: af.DirectoryPaths):
        """
        Before the model-fit via the non-linear search begins, this routine saves attributes of the `Analysis` object
        to the `pickles` folder such that they can be loaded after the analysis using PyAutoFit's database and aggregator
        tools.

        For this analysis the following are output:

        - The dataset's data.
        - The dataset's noise-map.
        - The settings associated with the dataset.
        - The settings associated with the inversion.
        - The settings associated with the pixelization.
        - The Cosmology.
        - The hyper dataset's model image and galaxy images, if used.

        It is common for these attributes to be loaded by many of the template aggregator functions given in the
        `aggregator` modules. For example, when using the database tools to reperform a fit, this will by default
        load the dataset, settings and other attributes necessary to perform a fit using the attributes output by
        this function.

        Parameters
        ----------
        paths
            The PyAutoFit paths object which manages all paths, e.g. where the non-linear search outputs are stored,
            visualization, and the pickled objects used by the aggregator output by this function.
        """
        paths.save_object("data", self.dataset.data)
        paths.save_object("noise_map", self.dataset.noise_map)
        paths.save_object("settings_dataset", self.dataset.settings)
        paths.save_object("settings_inversion", self.settings_inversion)
        paths.save_object("settings_pixelization", self.settings_pixelization)

        paths.save_object("cosmology", self.cosmology)

        if self.hyper_model_image is not None:
            paths.save_object("hyper_model_image", self.hyper_model_image)

        if self.hyper_galaxy_image_path_dict is not None:
            paths.save_object(
                "hyper_galaxy_image_path_dict", self.hyper_galaxy_image_path_dict
            )
Example #14
    def save_settings(self, paths: af.DirectoryPaths):

        super().save_settings(paths=paths)

        paths.save_object("settings_lens", self.settings_lens)