Example #1
def stitch(infns, registration_method, weaving_method):
    """
    Stitches a set of tiles.
    infns: file names of the tiles
    registration_method: registration method, passed to stitching.register()
    weaving_method: weaving method (WEAVER_MEAN or WEAVER_COLLAGE)
    returns (list of DataArrays): the stitched image for each stream
    """

    def leader_quality(da):
        """
        Key function for sorting the streams: use the largest EM stream first, then the other
        EM streams, then the other types of streams sorted by their size.
        return (int): the bigger the value, the higher the priority
        """
        # For now, we strongly prefer the EM images, because they usually have
        # the smallest FoV and the most contrast
        if da.metadata[model.MD_ACQ_TYPE] == model.MD_AT_EM:  # SEM stream
            return numpy.prod(da.shape)  # More pixels to find the overlap
        else:
            # A lot less likely
            return numpy.prod(da.shape) / 100

    da_streams = []  # for each stream, a list of DataArrays
    for fn in infns:
        # Read data
        converter = dataio.find_fittest_converter(fn)
        # TODO: use open_data/DataArrayShadow when converters support it
        das = converter.read_data(fn)
        logging.debug("Got %d streams from file %s", len(das), fn)

        # Remove the DAs we cannot (or don't want to) stitch
        das = add_acq_type_md(das)
        das = [da for da in das
               if da.metadata[model.MD_ACQ_TYPE] not in (model.MD_AT_AR, model.MD_AT_SPECTRUM)]

        # Add sorted DAs to list
        das = sorted(das, key=leader_quality, reverse=True)
        da_streams.append(tuple(das))

    def get_acq_time(das):
        return das[0].metadata.get(model.MD_ACQ_DATE, 0)

    da_streams = sorted(da_streams, key=get_acq_time)

    das_registered = stitching.register(da_streams, registration_method)

    # Weave every stream
    st_data = []
    for s in range(len(das_registered[0])):
        streams = []
        for da in das_registered:
            streams.append(da[s])
        da = stitching.weave(streams, weaving_method)
        da.metadata[model.MD_DIMS] = "YX"
        st_data.append(da)

    return st_data
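A minimal usage sketch for the stitch() helper above (not part of the original example). The tile file names are hypothetical, and the import path and the REGISTER_GLOBAL_SHIFT/WEAVER_MEAN constants are assumptions based on how the names are used in these examples; substitute whatever registration and weaving constants your stitching module actually exposes.

from odemis import dataio
from odemis.acq.stitching import REGISTER_GLOBAL_SHIFT, WEAVER_MEAN  # assumed import path and constants

# Hypothetical tile files produced by a tiled acquisition
tile_files = ["tile-00000x00000.ome.tiff", "tile-00001x00000.ome.tiff"]

# One stitched DataArray per stream
stitched = stitch(tile_files, REGISTER_GLOBAL_SHIFT, WEAVER_MEAN)

# Save the result with whichever converter fits the output file name
exporter = dataio.find_fittest_converter("stitched.ome.tiff")
exporter.export("stitched.ome.tiff", stitched)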
Example #2
    def test_one_tile(self):
        """
        Test that when there is only one tile, it's returned as-is
        """
        img12 = numpy.zeros((2048, 1937), dtype=numpy.uint16) + 4000
        md = {
            model.MD_SW_VERSION: "1.0-test",
            # tiff doesn't support É (but XML does)
            model.MD_DESCRIPTION: u"test",
            model.MD_BPP: 12,
            model.MD_BINNING: (1, 2),  # px, px
            model.MD_PIXEL_SIZE: (1e-6, 2e-5),  # m/px
            model.MD_POS: (1e-3, -30e-3),  # m
            model.MD_EXP_TIME: 1.2,  # s
            model.MD_IN_WL: (500e-9, 520e-9),  # m
            model.MD_DIMS: "YX",
        }
        intile = model.DataArray(img12, md)

        outd = weave([intile], WEAVER_COLLAGE)

        self.assertEqual(outd.shape, intile.shape)
        numpy.testing.assert_array_equal(outd, intile)
        self.assertEqual(outd.metadata, intile.metadata)

        # Same thing but with typical SEM data
        img8 = numpy.zeros((256, 356), dtype=numpy.uint8) + 40
        md8 = {
            model.MD_DESCRIPTION: u"test sem",
            model.MD_PIXEL_SIZE: (1.3e-6, 1.3e-6),  # m/px
            model.MD_POS: (10e-3, 30e-3),  # m
            model.MD_DWELL_TIME: 1.2e-6,  # s
            model.MD_DIMS: "YX",
        }
        intile = model.DataArray(img8, md8)

        outd = weave([intile], WEAVER_MEAN)

        self.assertEqual(outd.shape, intile.shape)
        numpy.testing.assert_array_equal(outd, intile)
        self.assertEqual(outd.metadata, intile.metadata)
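The test above only covers the single-tile case. Below is a rough two-tile sketch (not taken from the test suite) illustrating that weave() places tiles according to MD_POS and MD_PIXEL_SIZE; the import path is an assumption and the exact output shape depends on the weaver.

import numpy
from odemis import model
from odemis.acq.stitching import weave, WEAVER_MEAN  # assumed import path

pxs = (1e-6, 1e-6)  # m/px
shape = (256, 256)  # px, (Y, X)
overlap = 0.2
md0 = {model.MD_PIXEL_SIZE: pxs, model.MD_POS: (0, 0), model.MD_DIMS: "YX"}
md1 = dict(md0)
# Centre of the second tile shifted right by (1 - overlap) of the tile width
md1[model.MD_POS] = (shape[1] * (1 - overlap) * pxs[0], 0)

tile0 = model.DataArray(numpy.full(shape, 100, dtype=numpy.uint16), md0)
tile1 = model.DataArray(numpy.full(shape, 200, dtype=numpy.uint16), md1)

# The tiles overlap by ~20%, so the result is roughly 1.8 tile widths wide
big = weave([tile0, tile1], WEAVER_MEAN)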
Example #3
    def test_one_tile(self):
        """
        Test that when there is only one tile, it's returned as-is
        """
        img12 = numpy.zeros((2048, 1937), dtype=numpy.uint16) + 4000
        md = {
            model.MD_SW_VERSION: "1.0-test",
            # tiff doesn't support É (but XML does)
            model.MD_DESCRIPTION: u"test",
            model.MD_BPP: 12,
            model.MD_BINNING: (1, 2),  # px, px
            model.MD_PIXEL_SIZE: (1e-6, 2e-5),  # m/px
            model.MD_POS: (1e-3, -30e-3),  # m
            model.MD_EXP_TIME: 1.2,  # s
            model.MD_IN_WL: (500e-9, 520e-9),  # m
        }
        intile = model.DataArray(img12, md)

        outd = weave([intile], WEAVER_COLLAGE)

        self.assertEqual(outd.shape, intile.shape)
        numpy.testing.assert_array_equal(outd, intile)
        self.assertEqual(outd.metadata, intile.metadata)

        # Same thing but with typical SEM data
        img8 = numpy.zeros((256, 356), dtype=numpy.uint8) + 40
        md8 = {
            model.MD_DESCRIPTION: u"test sem",
            model.MD_PIXEL_SIZE: (1.3e-6, 1.3e-6),  # m/px
            model.MD_POS: (10e-3, 30e-3),  # m
            model.MD_DWELL_TIME: 1.2e-6,  # s
        }
        intile = model.DataArray(img8, md8)

        outd = weave([intile], WEAVER_MEAN)

        self.assertEqual(outd.shape, intile.shape)
        numpy.testing.assert_array_equal(outd, intile)
        self.assertEqual(outd.metadata, intile.metadata)
Example #4
    def _stitchTiles(self, da_list):
        """
        Stitch the acquired tiles to create a complete view of the required total area
        :param da_list: (list of lists of DataArrays) for each tile position, the data of every acquired stream
        :return: (list of DataArrays): one stitched image for each stream acquisition
        """
        st_data = []
        logging.info("Computing big image out of %d images", len(da_list))
        # TODO: Do this registration step in a separate thread while acquiring
        das_registered = stitching.register(da_list)

        weaving_method = WEAVER_COLLAGE_REVERSE  # Method used for SECOM
        logging.info("Using weaving method WEAVER_COLLAGE_REVERSE.")
        # Weave every stream
        if isinstance(das_registered[0], tuple):
            for s in range(len(das_registered[0])):
                streams = []
                for da in das_registered:
                    streams.append(da[s])
                da = stitching.weave(streams, weaving_method)
                st_data.append(da)
        else:
            da = stitching.weave(das_registered, weaving_method)
            st_data.append(da)
        return st_data
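For reference, the register-then-weave-per-stream pattern used in _stitchTiles() can also be written as a standalone helper. This is only a sketch: the import path is an assumption, and da_list is expected to hold, for each tile position, either a single DataArray or a tuple of DataArrays (one per stream), as in the examples above.

from odemis.acq import stitching
from odemis.acq.stitching import WEAVER_MEAN  # assumed import path

def stitch_tiles(da_list, weaving_method=WEAVER_MEAN):
    """
    da_list: for each tile position, a DataArray or a tuple of DataArrays (one per stream)
    returns (list of DataArrays): one stitched image per stream
    """
    das_registered = stitching.register(da_list)
    if isinstance(das_registered[0], tuple):
        # Weave each stream separately: take the s-th DataArray of every position
        return [stitching.weave([da[s] for da in das_registered], weaving_method)
                for s in range(len(das_registered[0]))]
    else:
        return [stitching.weave(das_registered, weaving_method)]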
Example #5
    def test_no_seam(self):
        """
        Test on decomposed image
        """

        for img in IMGS:
            conv = find_fittest_converter(img)
            data = conv.read_data(img)[0]
            img = ensure2DImage(data)
            numTiles = [2, 3, 4]
            overlap = [0.2, 0.3, 0.4]

            for n in numTiles:
                for o in overlap:
                    [tiles, _] = decompose_image(
                        img, o, n, "horizontalZigzag", False)

                    w = weave(tiles, WEAVER_MEAN)
                    sz = len(w)
                    numpy.testing.assert_allclose(w, img[:sz, :sz], rtol=1)
Example #6
    def acquire(self, dlg):
        main_data = self.main_app.main_data
        str_ctrl = self._tab.streambar_controller
        str_ctrl.pauseStreams()
        dlg.pauseSettings()
        self._unsubscribe_vas()

        orig_pos = main_data.stage.position.value
        trep = (self.nx.value, self.ny.value)
        nb = trep[0] * trep[1]
        # It's not a big deal if it was a bad guess as we'll use the actual data
        # before the first move
        sfov = self._guess_smallest_fov()
        fn = self.filename.value
        exporter = dataio.find_fittest_converter(fn)
        fn_bs, fn_ext = udataio.splitext(fn)

        ss, stitch_ss = self._get_acq_streams()
        end = self.estimate_time() + time.time()

        ft = model.ProgressiveFuture(end=end)
        self.ft = ft  # allows future to be canceled in show_dlg after closing window
        ft.running_subf = model.InstantaneousFuture()
        ft._task_state = RUNNING
        ft._task_lock = threading.Lock()
        ft.task_canceller = self._cancel_acquisition  # To allow cancelling while it's running
        ft.set_running_or_notify_cancel()  # Indicate the work is starting now
        dlg.showProgress(ft)

        # For stitching only
        da_list = []  # for each position, a list of DataArrays
        i = 0
        prev_idx = [0, 0]
        try:
            for ix, iy in self._generate_scanning_indices(trep):
                logging.debug("Acquiring tile %dx%d", ix, iy)
                self._move_to_tile((ix, iy), orig_pos, sfov, prev_idx)
                prev_idx = ix, iy
                # Update the progress bar
                ft.set_progress(end=self.estimate_time(nb - i) + time.time())

                ft.running_subf = acq.acquire(ss)
                das, e = ft.running_subf.result()  # blocks until all the acquisitions are finished
                if e:
                    logging.warning("Acquisition for tile %dx%d partially failed: %s",
                                    ix, iy, e)

                if ft._task_state == CANCELLED:
                    raise CancelledError()

                # TODO: do in a separate thread
                fn_tile = "%s-%.5dx%.5d%s" % (fn_bs, ix, iy, fn_ext)
                logging.debug("Will save data of tile %dx%d to %s", ix, iy, fn_tile)
                exporter.export(fn_tile, das)

                if ft._task_state == CANCELLED:
                    raise CancelledError()

                if self.stitch.value:
                    # Sort tiles (largest SEM stream in first position)
                    da_list.append(self.sort_das(das, stitch_ss))

                # Check that the FoV is correct using the data, and update it if not
                if i == 0:
                    sfov = self._check_fov(das, sfov)
                i += 1

            # Move stage to original position
            main_data.stage.moveAbs(orig_pos)

            # Stitch SEM and CL streams
            st_data = []
            if self.stitch.value and (not da_list or not da_list[0]):
                # if only AR or Spectrum are acquired
                logging.warning("No stream acquired that can be used for stitching.")
            elif self.stitch.value:
                logging.info("Acquisition completed, now stitching...")
                ft.set_progress(end=self.estimate_time(0) + time.time())

                logging.info("Computing big image out of %d images", len(da_list))
                das_registered = stitching.register(da_list)

                # Select weaving method
                # On a Sparc system the mean weaver gives the best result since it
                # smoothes the transitions between tiles. However, using this weaver on the
                # Secom/Delphi generates an image with dark stripes in the overlap regions which are
                # the result of carbon decomposition effects that typically occur in samples imaged
                # by these systems. To mitigate this, we use the
                # collage_reverse weaver that only shows the overlap region of the tile that
                # was imaged first.
                if self.microscope.role in ("secom", "delphi"):
                    weaving_method = WEAVER_COLLAGE_REVERSE
                    logging.info("Using weaving method WEAVER_COLLAGE_REVERSE.")
                else:
                    weaving_method = WEAVER_MEAN
                    logging.info("Using weaving method WEAVER_MEAN.")

                # Weave every stream
                if isinstance(das_registered[0], tuple):
                    for s in range(len(das_registered[0])):
                        streams = []
                        for da in das_registered:
                            streams.append(da[s])
                        da = stitching.weave(streams, weaving_method)
                        da.metadata[model.MD_DIMS] = "YX"  # TODO: do it in the weaver
                        st_data.append(da)
                else:
                    da = stitching.weave(das_registered, weaving_method)
                    st_data.append(da)

                # Save
                exporter = dataio.find_fittest_converter(fn)
                if exporter.CAN_SAVE_PYRAMID:
                    exporter.export(fn, st_data, pyramid=True)
                else:
                    logging.warning("File format doesn't support saving image in pyramidal form")
                    exporter.export(fn, st_data)

            ft.set_result(None)  # Indicate it's over

            # End of the (completed) acquisition
            if ft._task_state == CANCELLED:
                raise CancelledError()
            dlg.Close()

            # Open analysis tab
            if st_data:
                self.showAcquisition(fn)

            # TODO: also export a full image (based on reported position, or based
            # on alignment detection)
        except CancelledError:
            logging.debug("Acquisition cancelled")
            dlg.resumeSettings()
        except Exception as ex:
            logging.exception("Acquisition failed.")
            ft.running_subf.cancel()
            ft.set_result(None)
            # Also show the error in the window. It will be hidden next time a setting is changed.
            self._dlg.setAcquisitionInfo("Acquisition failed: %s" % (ex,),
                                         lvl=logging.ERROR)
        finally:
            logging.info("Tiled acquisition ended")
            main_data.stage.moveAbs(orig_pos)
Example #7
    def acquire(self, dlg):
        main_data = self.main_app.main_data
        str_ctrl = self._tab.streambar_controller
        str_ctrl.pauseStreams()
        dlg.pauseSettings()
        self._unsubscribe_vas()

        orig_pos = main_data.stage.position.value
        trep = (self.nx.value, self.ny.value)
        nb = trep[0] * trep[1]
        # It's not a big deal if it was a bad guess as we'll use the actual data
        # before the first move
        sfov = self._guess_smallest_fov()
        fn = self.filename.value
        exporter = dataio.find_fittest_converter(fn)
        fn_bs, fn_ext = udataio.splitext(fn)

        ss, stitch_ss = self._get_acq_streams()
        end = self.estimate_time() + time.time()

        ft = model.ProgressiveFuture(end=end)
        self.ft = ft  # allows future to be canceled in show_dlg after closing window
        ft.running_subf = model.InstantaneousFuture()
        ft._task_state = RUNNING
        ft._task_lock = threading.Lock()
        ft.task_canceller = self._cancel_acquisition  # To allow cancelling while it's running
        ft.set_running_or_notify_cancel()  # Indicate the work is starting now
        dlg.showProgress(ft)

        # For stitching only
        da_list = []  # for each position, a list of DataArrays
        i = 0
        prev_idx = [0, 0]
        try:
            for ix, iy in self._generate_scanning_indices(trep):
                logging.debug("Acquiring tile %dx%d", ix, iy)
                self._move_to_tile((ix, iy), orig_pos, sfov, prev_idx)
                prev_idx = ix, iy
                # Update the progress bar
                ft.set_progress(end=self.estimate_time(nb - i) + time.time())

                ft.running_subf = acqmng.acquire(
                    ss, self.main_app.main_data.settings_obs)
                das, e = ft.running_subf.result()  # blocks until all the acquisitions are finished
                if e:
                    logging.warning(
                        "Acquisition for tile %dx%d partially failed: %s", ix,
                        iy, e)

                if ft._task_state == CANCELLED:
                    raise CancelledError()

                # TODO: do in a separate thread
                fn_tile = "%s-%.5dx%.5d%s" % (fn_bs, ix, iy, fn_ext)
                logging.debug("Will save data of tile %dx%d to %s", ix, iy,
                              fn_tile)
                exporter.export(fn_tile, das)

                if ft._task_state == CANCELLED:
                    raise CancelledError()

                if self.stitch.value:
                    # Sort tiles (largest SEM stream in first position)
                    da_list.append(self.sort_das(das, stitch_ss))

                # Check that the FoV is correct using the data, and update it if not
                if i == 0:
                    sfov = self._check_fov(das, sfov)
                i += 1

            # Move stage to original position
            main_data.stage.moveAbs(orig_pos)

            # Stitch SEM and CL streams
            st_data = []
            if self.stitch.value and (not da_list or not da_list[0]):
                # if only AR or Spectrum are acquired
                logging.warning(
                    "No stream acquired that can be used for stitching.")
            elif self.stitch.value:
                logging.info("Acquisition completed, now stitching...")
                ft.set_progress(end=self.estimate_time(0) + time.time())

                logging.info("Computing big image out of %d images",
                             len(da_list))
                das_registered = stitching.register(da_list)

                # Select weaving method
                # On a Sparc system the mean weaver gives the best result since it
                # smoothes the transitions between tiles. However, using this weaver on the
                # Secom/Delphi generates an image with dark stripes in the overlap regions which are
                # the result of carbon decomposition effects that typically occur in samples imaged
                # by these systems. To mitigate this, we use the
                # collage_reverse weaver that only shows the overlap region of the tile that
                # was imaged first.
                if self.microscope.role in ("secom", "delphi"):
                    weaving_method = WEAVER_COLLAGE_REVERSE
                    logging.info(
                        "Using weaving method WEAVER_COLLAGE_REVERSE.")
                else:
                    weaving_method = WEAVER_MEAN
                    logging.info("Using weaving method WEAVER_MEAN.")

                # Weave every stream
                if isinstance(das_registered[0], tuple):
                    for s in range(len(das_registered[0])):
                        streams = []
                        for da in das_registered:
                            streams.append(da[s])
                        da = stitching.weave(streams, weaving_method)
                        da.metadata[model.MD_DIMS] = "YX"  # TODO: do it in the weaver
                        st_data.append(da)
                else:
                    da = stitching.weave(das_registered, weaving_method)
                    st_data.append(da)

                # Save
                exporter = dataio.find_fittest_converter(fn)
                if exporter.CAN_SAVE_PYRAMID:
                    exporter.export(fn, st_data, pyramid=True)
                else:
                    logging.warning(
                        "File format doesn't support saving image in pyramidal form"
                    )
                    exporter.export(fn, st_data)

            ft.set_result(None)  # Indicate it's over

            # End of the (completed) acquisition
            if ft._task_state == CANCELLED:
                raise CancelledError()
            dlg.Close()

            # Open analysis tab
            if st_data:
                popup.show_message(self.main_app.main_frame,
                                   "Tiled acquisition complete",
                                   "Will display stitched image")
                self.showAcquisition(fn)
            else:
                popup.show_message(self.main_app.main_frame,
                                   "Tiled acquisition complete",
                                   "Will display last tile")
                # It's easier to know the last filename, and it's also the most
                # interesting one for the user: if something went wrong (e.g. focus),
                # it's the tile most likely to show it.
                self.showAcquisition(fn_tile)

            # TODO: also export a full image (based on reported position, or based
            # on alignment detection)
        except CancelledError:
            logging.debug("Acquisition cancelled")
            dlg.resumeSettings()
        except Exception as ex:
            logging.exception("Acquisition failed.")
            ft.running_subf.cancel()
            ft.set_result(None)
            # Also show the error in the window. It will be hidden next time a setting is changed.
            self._dlg.setAcquisitionInfo("Acquisition failed: %s" % (ex, ),
                                         lvl=logging.ERROR)
        finally:
            logging.info("Tiled acquisition ended")
            main_data.stage.moveAbs(orig_pos)
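_generate_scanning_indices() itself is not shown in these examples. The generator below is a purely hypothetical sketch of the kind of "horizontalZigzag" (serpentine) ordering it presumably produces over an nx by ny grid, matching the ordering name passed to decompose_image() in the test above.

def zigzag_indices(trep):
    """Yield (ix, iy) tile indices in a horizontal zigzag (serpentine) scan order."""
    nx, ny = trep
    for iy in range(ny):
        xs = range(nx) if iy % 2 == 0 else range(nx - 1, -1, -1)
        for ix in xs:
            yield ix, iy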