    def test_to_heavy(self):
        shape = (5, 31, 55)
        B, Ny, Nx = shape
        coords = [(20, 10), (10, 30), (17, 42)]
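        # Build the simulated blend: the target PSF, per-band PSFs, images,
        # channels, SEDs, and morphologies for sources at the given (y, x) positions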
        result = initData(shape, coords, [3, 2, 1])
        targetPsfImage, psfImages, images, channels, seds, morphs, targetPsf, psfs = result
        images = images.astype(np.float32)
        seds = seds.astype(np.float32)

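        # Set up the model frame with the target PSF and match the observation
        # (with its per-band PSFs) to it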
        frame = mes.LsstFrame(shape, psfs=targetPsfImage[None])
        observation = mes.LsstObservation(images, psfs=psfImages).match(frame)
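        # Convert the numpy data into stack objects (footprint, peak, bbox)
        # and initialize a source at the first peak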
        foot, peak, bbox = numpyToStack(images, coords[0], (15, 3))
        src = mes.source.init_source(frame=frame,
                                     peak=peak,
                                     observation=observation,
                                     bbox=bbox,
                                     thresh=0)
        # Get the HeavyFootprint
        peakSchema = PeakTable.makeMinimalSchema()
        hFoot = mes.morphToHeavy(src, peakSchema=peakSchema)
        hBBox = hFoot.getBBox()

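        # The heavy footprint image should match the scarlet morphology cut out
        # by its bounding box. Stack boxes are inclusive on both edges while
        # scarlet boxes exclude the upper edge, hence the -1 on the max values.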
        hMorph = multiband.heavyFootprintToImage(hFoot, fill=0).image.array
        sBbox = scarlet.bbox.Box.from_data(src.morph)
        self.assertFloatsAlmostEqual(
            hMorph, sBbox.extract_from(src.morph.astype(np.float32)))
        self.assertEqual(hBBox.getMinX(), sBbox.start[-1])
        self.assertEqual(hBBox.getMinY(), sBbox.start[-2])
        self.assertEqual(hBBox.getMaxX(), sBbox.stop[-1] - 1)
        self.assertEqual(hBBox.getMaxY(), sBbox.stop[-2] - 1)

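        # The heavy footprint should contain a single peak at the injected
        # source position (coords are stored as (y, x))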
        peaks = hFoot.getPeaks()
        self.assertEqual(len(peaks), 1)
        hPeak = peaks[0]
        self.assertEqual(hPeak.getIx(), coords[0][1])
        self.assertEqual(hPeak.getIy(), coords[0][0])

        # Test Model to Heavy
        filters = list("grizy")
        hFoot = mes.modelToHeavy(src, filters, bbox.getMin(), observation)
        hModel = hFoot.getImage(fill=0).image.array

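        # The multiband footprint's bounding box should match the input bbox
        # and its image should reproduce the model rendered through the
        # observed PSFs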
        self.assertEqual(bbox, hFoot.getBBox())
        self.assertFloatsAlmostEqual(hModel,
                                     observation.render(src.get_model()),
                                     rtol=1e-4,
                                     atol=1e-4)

    def test_to_heavy(self):
        shape = (5, 31, 55)
        B, Ny, Nx = shape
        coords = [(20, 10), (10, 30), (17, 42)]
        result = initData(shape, coords, [3, 2, 1])
        targetPsfImage, psfImages, images, channels, seds, morphs, targetPsf, psfs = result
        images = images.astype(np.float32)
        seds = seds.astype(np.float32)

        frame = scarlet.Frame(shape, psf=targetPsf, channels=np.arange(B))
        observation = scarlet.Observation(images, psf=psfImages, channels=np.arange(B)).match(frame)
        foot, peak, bbox = numpyToStack(images, coords[0], (15, 3))
        xmin = bbox.getMinX()
        ymin = bbox.getMinY()
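        # Peak position relative to the bounding box origin, in (y, x) order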
        center = np.array([peak.getIy()-ymin, peak.getIx()-xmin], dtype=int)
        src = init_source(frame=frame, center=center, observations=[observation], thresh=0)

        # Convolve the model with the observed PSF
        model = src.get_model(frame=src.frame)
        model = observation.render(model)

    def test_deblend_task(self):
        # Set the random seed so that the noise field is reproducible
        np.random.seed(0)
        # Test that the deblend task executes successfully.
        # More detailed tests can be added in the future, but for now this
        # at least ensures that the task isn't broken.
        shape = (5, 31, 55)
        coords = [(15, 25), (10, 30), (17, 38)]
        amplitudes = [80, 60, 90]
        result = initData(shape, coords, amplitudes)
        targetPsfImage, psfImages, images, channels, seds, morphs, targetPsf, psfs = result
        B, Ny, Nx = shape

        # Add some noise, otherwise the task will blow up due to
        # zero variance
        noise = 10 * (np.random.rand(*images.shape).astype(np.float32) - .5)
        images += noise

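        # Pack the simulated images into a MultibandExposure and attach the
        # per-band PSFs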
        filters = "grizy"
        _images = afwImage.MultibandMaskedImage.fromArrays(
            filters, images.astype(np.float32), None, noise)
        coadds = [
            afwImage.Exposure(img, dtype=img.image.array.dtype)
            for img in _images
        ]
        coadds = afwImage.MultibandExposure.fromExposures(filters, coadds)
        for b, coadd in enumerate(coadds):
            coadd.setPsf(psfs[b])

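        # Set up the detection and deblend tasks using a minimal source schema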
        schema = SourceCatalog.Table.makeMinimalSchema()

        detectionTask = SourceDetectionTask(schema=schema)
        config = ScarletDeblendTask.ConfigClass()
        config.maxIter = 300
        deblendTask = ScarletDeblendTask(schema=schema, config=config)

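        # Detect sources on the r-band coadd and run the deblender on the
        # multiband coadds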
        table = SourceCatalog.Table.make(schema)
        detectionResult = detectionTask.run(table, coadds["r"])
        catalog = detectionResult.sources
        self.assertEqual(len(catalog), 1)
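        # Running the deblender on the single detected parent should complete
        # without error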
        _, result = deblendTask.run(coadds, catalog)

    def test_to_heavy(self):
        shape = (5, 31, 55)
        B, Ny, Nx = shape
        coords = [(20, 10), (10, 30), (17, 42)]
        result = initData(shape, coords, [3, 2, 1])
        targetPsfImage, psfImages, images, channels, seds, morphs, targetPsf, psfs = result
        images = images.astype(np.float32)
        seds = seds.astype(np.float32)

        frame = scarlet.Frame(shape, psfs=targetPsf, channels=np.arange(B))
        observation = scarlet.Observation(images, psfs=psfImages, channels=np.arange(B)).match(frame)
        foot, peak, bbox = numpyToStack(images, coords[0], (15, 3))
        xmin = bbox.getMinX()
        ymin = bbox.getMinY()
        center = np.array([peak.getIy()-ymin, peak.getIx()-xmin], dtype=int)
        src = initSource(frame=frame, center=center, observation=observation, thresh=0, downgrade=False)

        # Convolve the model with the observed PSF
        model = src.get_model(frame=src.frame)
        model = observation.render(model)

        # Test Model to Heavy
        filters = list("grizy")
        src.detectedPeak = peak
        hFoot = mes.source.modelToHeavy(src, filters, bbox.getMin(), observation)
        hModel = hFoot.getImage(fill=0).image.array

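        # The footprint's bounding box should match the input bbox and its
        # image should reproduce the PSF-convolved model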
        self.assertEqual(bbox, hFoot.getBBox())
        self.assertFloatsAlmostEqual(hModel, model, rtol=1e-4, atol=1e-4)

        # Test the peak in each band
        for single in hFoot:
            peaks = single.getPeaks()
            self.assertEqual(len(peaks), 1)
            hPeak = peaks[0]
            self.assertEqual(hPeak.getIx()-xmin, coords[0][1])
            self.assertEqual(hPeak.getIy()-ymin, coords[0][0])

    def test_deblend_task(self):
        # Set the random seed so that the noise field is reproducible
        np.random.seed(0)
        shape = (5, 100, 115)
        coords = [
            # blend
            (15, 25),
            (10, 30),
            (17, 38),
            # isolated source
            (85, 90),
        ]
        amplitudes = [
            # blend
            80,
            60,
            90,
            # isolated source
            20,
        ]
        result = initData(shape, coords, amplitudes)
        targetPsfImage, psfImages, images, channels, seds, morphs, targetPsf, psfs = result
        B, Ny, Nx = shape

        # Add some noise, otherwise the task will blow up due to
        # zero variance
        noise = 10 * (np.random.rand(*images.shape).astype(np.float32) - .5)
        images += noise

        filters = "grizy"
        _images = afwImage.MultibandMaskedImage.fromArrays(
            filters, images.astype(np.float32), None, noise)
        coadds = [
            afwImage.Exposure(img, dtype=img.image.array.dtype)
            for img in _images
        ]
        coadds = afwImage.MultibandExposure.fromExposures(filters, coadds)
        for b, coadd in enumerate(coadds):
            coadd.setPsf(psfs[b])

        schema = SourceCatalog.Table.makeMinimalSchema()

        detectionTask = SourceDetectionTask(schema=schema)

        # Adjust config options to test skipping parents
        config = ScarletDeblendTask.ConfigClass()
        config.maxIter = 100
        config.maxFootprintArea = 1000
        config.maxNumberOfPeaks = 4
        deblendTask = ScarletDeblendTask(schema=schema, config=config)

        table = SourceCatalog.Table.make(schema)
        detectionResult = detectionTask.run(table, coadds["r"])
        catalog = detectionResult.sources

        # Add a footprint that is too large
        src = catalog.addNew()
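        # A box footprint with this half-length has an area greater than
        # config.maxFootprintArea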
        halfLength = int(np.ceil(np.sqrt(config.maxFootprintArea) + 1))
        ss = SpanSet.fromShape(halfLength, Stencil.BOX, offset=(50, 50))
        bigfoot = Footprint(ss)
        bigfoot.addPeak(50, 50, 100)
        src.setFootprint(bigfoot)

        # Add a footprint with too many peaks
        src = catalog.addNew()
        ss = SpanSet.fromShape(10, Stencil.BOX, offset=(75, 20))
        denseFoot = Footprint(ss)
        for n in range(config.maxNumberOfPeaks + 1):
            denseFoot.addPeak(70 + 2 * n, 15 + 2 * n, 10 * n)
        src.setFootprint(denseFoot)

        # Run the deblender
        result = deblendTask.run(coadds, catalog)
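        # The deblender returns one catalog per band, keyed by filter name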

        # Make sure that the catalogs have the same sources in all bands,
        # and check that band-independent columns are equal
        bandIndependentColumns = [
            "id",
            "parent",
            "deblend_nPeaks",
            "deblend_nChild",
            "deblend_peak_center_x",
            "deblend_peak_center_y",
            "deblend_runtime",
            "deblend_iterations",
            "deblend_logL",
            "deblend_spectrumInitFlag",
            "deblend_blendConvergenceFailedFlag",
        ]
        self.assertEqual(len(filters), len(result))
        ref = result[filters[0]]
        for f in filters[1:]:
            for col in bandIndependentColumns:
                np.testing.assert_array_equal(result[f][col], ref[col])

        # Check that other columns are consistent
        for f, _catalog in result.items():
            parents = _catalog[_catalog["parent"] == 0]
            # Check that the number of deblended children is consistent
            self.assertEqual(np.sum(_catalog["deblend_nChild"]),
                             len(_catalog) - len(parents))

            for parent in parents:
                children = _catalog[_catalog["parent"] == parent.get("id")]
                # Check that nChild is set correctly
                self.assertEqual(len(children), parent.get("deblend_nChild"))
                # Check that parent columns are propagated to their children
                for parentCol, childCol in config.columnInheritance.items():
                    np.testing.assert_array_equal(parent.get(parentCol),
                                                  children[childCol])

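            # Check each deblended child's footprint, peak flux, and peak position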
            children = _catalog[_catalog["parent"] != 0]
            for child in children:
                fp = child.getFootprint()
                img = heavyFootprintToImage(fp)
                # Check that the flux at the center is correct.
                # Note: this only works in this test image because the
                # detected peak is at the same location as the scarlet peak.
                # If the peak were shifted, the flux value would still be
                # correct but deblend_peak_center would not be the true peak
                # location.
                px = child.get("deblend_peak_center_x")
                py = child.get("deblend_peak_center_y")
                flux = img.image[Point2I(px, py)]
                self.assertEqual(flux, child.get("deblend_peak_instFlux"))

                # Check that the peak positions match the catalog entry
                peaks = fp.getPeaks()
                self.assertEqual(px, peaks[0].getIx())
                self.assertEqual(py, peaks[0].getIy())

            # Check that all sources have the correct number of peaks
            for src in _catalog:
                fp = src.getFootprint()
                self.assertEqual(len(fp.peaks), src.get("deblend_nPeaks"))

            # Check that only the large footprint was flagged as too big
            largeFootprint = np.zeros(len(_catalog), dtype=bool)
            largeFootprint[2] = True
            np.testing.assert_array_equal(largeFootprint,
                                          _catalog["deblend_parentTooBig"])

            # Check that only the dense footprint was flagged as having too many peaks
            denseFootprint = np.zeros(len(_catalog), dtype=bool)
            denseFootprint[3] = True
            np.testing.assert_array_equal(denseFootprint,
                                          _catalog["deblend_tooManyPeaks"])

            # Check that only the appropriate parents were skipped
            skipped = largeFootprint | denseFootprint
            np.testing.assert_array_equal(skipped, _catalog["deblend_skipped"])