"""
We import and make pipelines as per usual, albeit we'll now be doing this for
multiple pipelines! We then run each pipeline, passing the results of previous
pipelines to subsequent pipelines.
"""
from slam.imaging.with_lens_light.pipelines import source__parametric
from slam.imaging.with_lens_light.pipelines import light__parametric
from slam.imaging.with_lens_light.pipelines import mass__total
from slam.imaging.with_lens_light.pipelines import subhalo

# NOTE(review): the original rebound each imported module name to its pipeline
# instance (e.g. `source__parametric = source__parametric.make_pipeline(...)`),
# shadowing the module object. Distinct `*_pipeline` names are used instead so
# the modules remain referable after this point.

# Fit the source galaxy with a parametric light profile first; its results
# seed every later pipeline.
source_pipeline = source__parametric.make_pipeline(slam=slam, settings=settings)
source_results = source_pipeline.run(dataset=imaging, mask=mask)

# Fit the lens galaxy's light, conditioned on the source-pipeline results.
light_pipeline = light__parametric.make_pipeline(
    slam=slam, settings=settings, source_results=source_results
)
light_results = light_pipeline.run(dataset=imaging, mask=mask)

# Fit the lens's total mass distribution using both prior pipelines' results.
mass_pipeline = mass__total.make_pipeline(
    slam=slam,
    settings=settings,
    source_results=source_results,
    light_results=light_results,
)
mass_results = mass_pipeline.run(dataset=imaging, mask=mask)

# Finally, search for dark-matter subhalos around the best-fit mass model.
subhalo_pipeline = subhalo.make_pipeline_single_plane(
    slam=slam, settings=settings, mass_results=mass_results
)
subhalo_pipeline.run(dataset=imaging, mask=mask)
"""
Each model-fit performed by sensitivity mapping creates a new instance of an
`Analysis` class, which contains the data simulated by the `simulate_function`
for that model. This requires us to write a wrapper around the PyAutoLens
`Analysis` class.
"""


class Analysis(a.Analysis):
    """
    Analysis wrapper used for every sensitivity-mapping model-fit.

    Extends the base `a.Analysis` so each simulated-dataset fit also carries
    the hyper images taken from the final result of the mass pipeline.
    """

    def __init__(self, masked_imaging):
        """
        Parameters
        ----------
        masked_imaging
            The (simulated) imaging dataset this analysis instance fits.
        """
        super().__init__(
            masked_imaging=masked_imaging, settings=settings, cosmology=cosmo.Planck15
        )

        # Attach the hyper images of the mass pipeline's last result;
        # presumably consumed by the parent `a.Analysis` during fitting —
        # TODO(review): confirm against the base-class implementation.
        self.hyper_galaxy_image_path_dict = (
            mass_results.last.hyper_galaxy_image_path_dict
        )
        self.hyper_model_image = mass_results.last.hyper_model_image


# NOTE(review): the original rebound the imported `subhalo` module name to the
# sensitivity-mapping object; a distinct name avoids shadowing the module.
sensitivity_mapper = subhalo.sensitivity_mapping(
    slam=slam,
    mask=mask,
    psf=imaging.psf,
    mass_results=mass_results,
    analysis_cls=Analysis,
)
sensitivity_mapper.run()