def stitch(infns, registration_method, weaving_method):
    """
    Stitches a set of tiles.
    infns (list of str): file names of the tile files to stitch
    registration_method: registration method to pass to stitching.register()
    weaving_method: weaving method (WEAVER_MEAN or WEAVER_COLLAGE)
    returns (list of DataArray): the stitched image for every stream
    """
    def leader_quality(da):
        """
        Key function for sorting the streams of one file. Use largest EM stream
        first, then other EM streams, then other types of streams sorted by
        their size.
        da (DataArray): one stream's data
        return (int): the bigger, the more leadership
        """
        # For now, we prefer a lot the EM images, because they are usually the
        # one with the smallest FoV and the most contrast
        if da.metadata[model.MD_ACQ_TYPE] == model.MD_AT_EM:  # SEM stream
            return numpy.prod(da.shape)  # More pixels to find the overlap
        else:
            # A lot less likely
            return numpy.prod(da.shape) / 100

    da_streams = []  # for each file, a tuple of DataArrays (one per stream)
    for fn in infns:
        # Read data
        converter = dataio.find_fittest_converter(fn)
        # TODO: use open_data/DataArrayShadow when converter support it
        das = converter.read_data(fn)
        logging.debug("Got %d streams from file %s", len(das), fn)

        # Remove the DAs we don't want to (cannot) stitch
        das = add_acq_type_md(das)
        das = [da for da in das
               if da.metadata[model.MD_ACQ_TYPE] not in (model.MD_AT_AR, model.MD_AT_SPECTRUM)]

        # Add sorted DAs to list (largest EM stream first)
        das = sorted(das, key=leader_quality, reverse=True)
        da_streams.append(tuple(das))

    def get_acq_time(das):
        """Key function: acquisition date of a file's first stream (0 if unknown)."""
        return das[0].metadata.get(model.MD_ACQ_DATE, 0)

    # Process the tiles in chronological acquisition order
    da_streams = sorted(da_streams, key=get_acq_time)

    das_registered = stitching.register(da_streams, registration_method)

    # Weave every stream
    st_data = []
    for s in range(len(das_registered[0])):
        streams = [da[s] for da in das_registered]
        da = stitching.weave(streams, weaving_method)
        da.metadata[model.MD_DIMS] = "YX"
        st_data.append(da)

    return st_data
def test_real_images_identity(self):
    """ Test register wrapper function """
    for img_fn in IMGS:
        conv = find_fittest_converter(img_fn)
        data = conv.read_data(img_fn)[0]
        full_img = ensure2DImage(data)

        # Decompose into a 2x2 grid with 20% overlap, horizontal zigzag order
        num = 2
        o = 0.2
        a = "horizontalZigzag"
        [tiles, pos] = decompose_image(full_img, o, num, a, False)

        upd_tiles = register(tiles, method=REGISTER_IDENTITY)

        # Identity registration must keep every tile at its decomposed position
        for i, tile in enumerate(upd_tiles):
            calculatedPosition = tile.metadata[model.MD_POS]
            self.assertAlmostEqual(calculatedPosition[0], pos[i][0], places=1)
            self.assertAlmostEqual(calculatedPosition[1], pos[i][1], places=1)
def _stitchTiles(self, da_list):
    """
    Stitch the acquired tiles to create a complete view of the required total area
    :return: (list of DataArrays): a stitched data for each stream acquisition
    """
    logging.info("Computing big image out of %d images", len(da_list))
    # TODO: Do this registration step in a separate thread while acquiring
    das_registered = stitching.register(da_list)

    weaving_method = WEAVER_COLLAGE_REVERSE  # Method used for SECOM
    logging.info("Using weaving method WEAVER_COLLAGE_REVERSE.")

    st_data = []
    # Weave every stream
    if isinstance(das_registered[0], tuple):
        # Several streams per tile: weave each stream index separately
        for s_idx in range(len(das_registered[0])):
            per_stream = [tile_das[s_idx] for tile_das in das_registered]
            st_data.append(stitching.weave(per_stream, weaving_method))
    else:
        # Single stream per tile
        st_data.append(stitching.weave(das_registered, weaving_method))
    return st_data
def acquire(self, dlg):
    """
    Run the tiled acquisition: move to every scan position, acquire the
    streams, save each tile, then (optionally) register + weave the tiles
    into one big stitched image and save it.
    dlg: the plugin acquisition dialog (provides pauseSettings/showProgress/Close)
    """
    main_data = self.main_app.main_data
    str_ctrl = self._tab.streambar_controller
    str_ctrl.pauseStreams()
    dlg.pauseSettings()
    self._unsubscribe_vas()

    orig_pos = main_data.stage.position.value
    trep = (self.nx.value, self.ny.value)  # number of tiles in x, y
    nb = trep[0] * trep[1]  # total number of tiles
    # It's not a big deal if it was a bad guess as we'll use the actual data
    # before the first move
    sfov = self._guess_smallest_fov()
    fn = self.filename.value
    exporter = dataio.find_fittest_converter(fn)
    fn_bs, fn_ext = udataio.splitext(fn)

    ss, stitch_ss = self._get_acq_streams()

    end = self.estimate_time() + time.time()
    ft = model.ProgressiveFuture(end=end)
    self.ft = ft  # allows future to be canceled in show_dlg after closing window
    ft.running_subf = model.InstantaneousFuture()
    ft._task_state = RUNNING
    ft._task_lock = threading.Lock()
    ft.task_canceller = self._cancel_acquisition  # To allow cancelling while it's running
    ft.set_running_or_notify_cancel()  # Indicate the work is starting now
    dlg.showProgress(ft)

    # For stitching only
    da_list = []  # for each position, a list of DataArrays
    i = 0  # number of tiles acquired so far
    prev_idx = [0, 0]
    try:
        for ix, iy in self._generate_scanning_indices(trep):
            logging.debug("Acquiring tile %dx%d", ix, iy)
            self._move_to_tile((ix, iy), orig_pos, sfov, prev_idx)
            prev_idx = ix, iy
            # Update the progress bar
            ft.set_progress(end=self.estimate_time(nb - i) + time.time())

            ft.running_subf = acq.acquire(ss)
            das, e = ft.running_subf.result()  # blocks until all the acquisitions are finished
            if e:
                # Partial failure: keep going with whatever data was acquired
                logging.warning("Acquisition for tile %dx%d partially failed: %s",
                                ix, iy, e)

            if ft._task_state == CANCELLED:
                raise CancelledError()

            # TODO: do in a separate thread
            # Save each tile to its own file, named after the tile indices
            fn_tile = "%s-%.5dx%.5d%s" % (fn_bs, ix, iy, fn_ext)
            logging.debug("Will save data of tile %dx%d to %s", ix, iy, fn_tile)
            exporter.export(fn_tile, das)

            if ft._task_state == CANCELLED:
                raise CancelledError()

            if self.stitch.value:
                # Sort tiles (largest sem on first position)
                da_list.append(self.sort_das(das, stitch_ss))

            # Check the FoV is correct using the data, and if not update
            if i == 0:
                sfov = self._check_fov(das, sfov)
            i += 1

        # Move stage to original position
        main_data.stage.moveAbs(orig_pos)

        # Stitch SEM and CL streams
        st_data = []
        if self.stitch.value and (not da_list or not da_list[0]):
            # if only AR or Spectrum are acquired
            logging.warning("No stream acquired that can be used for stitching.")
        elif self.stitch.value:
            logging.info("Acquisition completed, now stitching...")
            ft.set_progress(end=self.estimate_time(0) + time.time())

            logging.info("Computing big image out of %d images", len(da_list))
            das_registered = stitching.register(da_list)

            # Select weaving method
            # On a Sparc system the mean weaver gives the best result since it
            # smoothes the transitions between tiles. However, using this weaver on the
            # Secom/Delphi generates an image with dark stripes in the overlap regions which are
            # the result of carbon decomposition effects that typically occur in samples imaged
            # by these systems. To mediate this, we use the
            # collage_reverse weaver that only shows the overlap region of the tile that
            # was imaged first.
            if self.microscope.role in ("secom", "delphi"):
                weaving_method = WEAVER_COLLAGE_REVERSE
                logging.info("Using weaving method WEAVER_COLLAGE_REVERSE.")
            else:
                weaving_method = WEAVER_MEAN
                logging.info("Using weaving method WEAVER_MEAN.")

            # Weave every stream
            if isinstance(das_registered[0], tuple):
                # Several streams per tile: weave each stream index separately
                for s in range(len(das_registered[0])):
                    streams = []
                    for da in das_registered:
                        streams.append(da[s])
                    da = stitching.weave(streams, weaving_method)
                    da.metadata[model.MD_DIMS] = "YX"  # TODO: do it in the weaver
                    st_data.append(da)
            else:
                da = stitching.weave(das_registered, weaving_method)
                st_data.append(da)

            # Save
            exporter = dataio.find_fittest_converter(fn)
            if exporter.CAN_SAVE_PYRAMID:
                exporter.export(fn, st_data, pyramid=True)
            else:
                logging.warning("File format doesn't support saving image in pyramidal form")
                exporter.export(fn, st_data)

        ft.set_result(None)  # Indicate it's over

        # End of the (completed) acquisition
        if ft._task_state == CANCELLED:
            raise CancelledError()
        dlg.Close()

        # Open analysis tab
        if st_data:
            self.showAcquisition(fn)

        # TODO: also export a full image (based on reported position, or based
        # on alignment detection)
    except CancelledError:
        logging.debug("Acquisition cancelled")
        dlg.resumeSettings()
    except Exception as ex:
        logging.exception("Acquisition failed.")
        ft.running_subf.cancel()
        ft.set_result(None)
        # Show also in the window. It will be hidden next time a setting is changed.
        self._dlg.setAcquisitionInfo("Acquisition failed: %s" % (ex,),
                                     lvl=logging.ERROR)
    finally:
        logging.info("Tiled acquisition ended")
        # Always return the stage to where the acquisition started
        main_data.stage.moveAbs(orig_pos)
def test_dep_tiles(self):
    """ Test register wrapper function, when dependent tiles are present """
    # Test on 3 layers of the same image create by decompose_image
    for img in IMGS:
        conv = find_fittest_converter(img)
        data = conv.read_data(img)[0]
        img = ensure2DImage(data)
        num = 3  # tiles per side
        o = 0.3  # overlap ratio
        a = "horizontalZigzag"  # acquisition order
        [tiles, pos] = decompose_image(img, o, num, a)
        # Each entry: (main tile, dep tile, dep tile) — here deps are identical copies
        all_tiles = []
        for i in range(len(pos)):
            all_tiles.append((tiles[i], tiles[i], tiles[i]))
        all_tiles_new = register(all_tiles)
        for i in range(len(pos)):
            tile_pos = all_tiles_new[i][0].metadata[model.MD_POS]
            dep_pos = (all_tiles_new[i][1].metadata[model.MD_POS],
                       all_tiles_new[i][2].metadata[model.MD_POS])

            diff1 = abs(tile_pos[0] - pos[i][0])
            diff2 = abs(tile_pos[1] - pos[i][1])
            # allow difference of 5% of tile
            px_size = tiles[i].metadata[model.MD_PIXEL_SIZE]
            margin = 0.05 * tiles[i].shape[0] * px_size[0]
            self.assertLessEqual(diff1, margin,
                                 "Failed for %s tiles, %s overlap and %s method," % (num, o, a) +
                                 " %f != %f" % (tile_pos[0], pos[i][0]))
            self.assertLessEqual(diff2, margin,
                                 "Failed for %s tiles, %s overlap and %s method," % (num, o, a) +
                                 " %f != %f" % (tile_pos[1], pos[i][1]))

            # Dependent tiles must land within the same margin of the expected position
            for j in range(2):
                diff1 = abs(dep_pos[j][0] - pos[i][0])
                self.assertLessEqual(diff1, margin,
                                     "Failed for %s tiles, %s overlap and %s method," % (num, o, a) +
                                     " %f != %f" % (dep_pos[j][0], pos[i][0]))
                diff2 = abs(dep_pos[j][1] - pos[i][1])
                self.assertLessEqual(diff2, margin,
                                     "Failed for %s tiles, %s overlap and %s method," % (num, o, a) +
                                     " %f != %f" % (dep_pos[j][1], pos[i][1]))

        # Test with shifted dependent tiles
        [tiles, pos] = decompose_image(img, o, num, a)

        # Add shift
        dep_tiles = copy.deepcopy(tiles)
        rnd1 = [random.randrange(-1000, 1000) for _ in range(len(pos))]
        rnd2 = [random.randrange(-1000, 1000) for _ in range(len(pos))]
        # NOTE(review): px_size here is left over from the last iteration of the
        # verification loop above — presumably all tiles share the same pixel
        # size, but confirm; otherwise the applied shift is inconsistent.
        for i in range(len(dep_tiles)):
            p = (dep_tiles[i].metadata[model.MD_POS][0] + rnd1[i] * px_size[0],
                 dep_tiles[i].metadata[model.MD_POS][1] + rnd2[i] * px_size[1])
            dep_tiles[i].metadata[model.MD_POS] = p

        all_tiles = []
        for i in range(len(pos)):
            all_tiles.append((tiles[i], dep_tiles[i], dep_tiles[i]))
        all_tiles_new = register(all_tiles)
        for i in range(len(pos)):
            tile_pos = all_tiles_new[i][0].metadata[model.MD_POS]
            dep_pos = (all_tiles_new[i][1].metadata[model.MD_POS],
                       all_tiles_new[i][2].metadata[model.MD_POS])

            diff1 = abs(tile_pos[0] - pos[i][0])
            diff2 = abs(tile_pos[1] - pos[i][1])
            # allow difference of 1% of tile
            px_size = tiles[i].metadata[model.MD_PIXEL_SIZE]
            margin1 = 0.01 * tiles[i].shape[0] * px_size[0]
            margin2 = 0.01 * tiles[i].shape[1] * px_size[1]
            self.assertLessEqual(diff1, margin1,
                                 "Failed for %s tiles, %s overlap and %s method," % (num, o, a) +
                                 " %f != %f" % (tile_pos[0], pos[i][0]))
            self.assertLessEqual(diff2, margin2,
                                 "Failed for %s tiles, %s overlap and %s method," % (num, o, a) +
                                 " %f != %f" % (tile_pos[1], pos[i][1]))

            # Dependent tiles must keep their (shifted) offset relative to the main tile
            for j in range(2):
                self.assertAlmostEqual(dep_pos[j][0], tile_pos[0] + rnd1[i] * px_size[0])
                self.assertAlmostEqual(dep_pos[j][1], tile_pos[1] + rnd2[i] * px_size[1])
def acquire(self, dlg):
    """
    Run the tiled acquisition: move to every scan position, acquire the
    streams, save each tile, then (optionally) register + weave the tiles
    into one big stitched image and save it. Finally show the result (or the
    last tile) in the analysis tab.
    dlg: the plugin acquisition dialog (provides pauseSettings/showProgress/Close)
    """
    main_data = self.main_app.main_data
    str_ctrl = self._tab.streambar_controller
    str_ctrl.pauseStreams()
    dlg.pauseSettings()
    self._unsubscribe_vas()

    orig_pos = main_data.stage.position.value
    trep = (self.nx.value, self.ny.value)  # number of tiles in x, y
    nb = trep[0] * trep[1]  # total number of tiles
    # It's not a big deal if it was a bad guess as we'll use the actual data
    # before the first move
    sfov = self._guess_smallest_fov()
    fn = self.filename.value
    exporter = dataio.find_fittest_converter(fn)
    fn_bs, fn_ext = udataio.splitext(fn)

    ss, stitch_ss = self._get_acq_streams()

    end = self.estimate_time() + time.time()
    ft = model.ProgressiveFuture(end=end)
    self.ft = ft  # allows future to be canceled in show_dlg after closing window
    ft.running_subf = model.InstantaneousFuture()
    ft._task_state = RUNNING
    ft._task_lock = threading.Lock()
    ft.task_canceller = self._cancel_acquisition  # To allow cancelling while it's running
    ft.set_running_or_notify_cancel()  # Indicate the work is starting now
    dlg.showProgress(ft)

    # For stitching only
    da_list = []  # for each position, a list of DataArrays
    i = 0  # number of tiles acquired so far
    prev_idx = [0, 0]
    try:
        # NOTE(review): if the scan yields no positions, fn_tile below stays
        # unbound and the "last tile" branch would raise NameError — presumably
        # nx/ny are always >= 1; confirm.
        for ix, iy in self._generate_scanning_indices(trep):
            logging.debug("Acquiring tile %dx%d", ix, iy)
            self._move_to_tile((ix, iy), orig_pos, sfov, prev_idx)
            prev_idx = ix, iy
            # Update the progress bar
            ft.set_progress(end=self.estimate_time(nb - i) + time.time())

            ft.running_subf = acqmng.acquire(
                ss, self.main_app.main_data.settings_obs)
            das, e = ft.running_subf.result(
            )  # blocks until all the acquisitions are finished
            if e:
                # Partial failure: keep going with whatever data was acquired
                logging.warning(
                    "Acquisition for tile %dx%d partially failed: %s",
                    ix, iy, e)

            if ft._task_state == CANCELLED:
                raise CancelledError()

            # TODO: do in a separate thread
            # Save each tile to its own file, named after the tile indices
            fn_tile = "%s-%.5dx%.5d%s" % (fn_bs, ix, iy, fn_ext)
            logging.debug("Will save data of tile %dx%d to %s", ix, iy, fn_tile)
            exporter.export(fn_tile, das)

            if ft._task_state == CANCELLED:
                raise CancelledError()

            if self.stitch.value:
                # Sort tiles (largest sem on first position)
                da_list.append(self.sort_das(das, stitch_ss))

            # Check the FoV is correct using the data, and if not update
            if i == 0:
                sfov = self._check_fov(das, sfov)
            i += 1

        # Move stage to original position
        main_data.stage.moveAbs(orig_pos)

        # Stitch SEM and CL streams
        st_data = []
        if self.stitch.value and (not da_list or not da_list[0]):
            # if only AR or Spectrum are acquired
            logging.warning(
                "No stream acquired that can be used for stitching.")
        elif self.stitch.value:
            logging.info("Acquisition completed, now stitching...")
            ft.set_progress(end=self.estimate_time(0) + time.time())

            logging.info("Computing big image out of %d images", len(da_list))
            das_registered = stitching.register(da_list)

            # Select weaving method
            # On a Sparc system the mean weaver gives the best result since it
            # smoothes the transitions between tiles. However, using this weaver on the
            # Secom/Delphi generates an image with dark stripes in the overlap regions which are
            # the result of carbon decomposition effects that typically occur in samples imaged
            # by these systems. To mediate this, we use the
            # collage_reverse weaver that only shows the overlap region of the tile that
            # was imaged first.
            if self.microscope.role in ("secom", "delphi"):
                weaving_method = WEAVER_COLLAGE_REVERSE
                logging.info(
                    "Using weaving method WEAVER_COLLAGE_REVERSE.")
            else:
                weaving_method = WEAVER_MEAN
                logging.info("Using weaving method WEAVER_MEAN.")

            # Weave every stream
            if isinstance(das_registered[0], tuple):
                # Several streams per tile: weave each stream index separately
                for s in range(len(das_registered[0])):
                    streams = []
                    for da in das_registered:
                        streams.append(da[s])
                    da = stitching.weave(streams, weaving_method)
                    da.metadata[
                        model.MD_DIMS] = "YX"  # TODO: do it in the weaver
                    st_data.append(da)
            else:
                da = stitching.weave(das_registered, weaving_method)
                st_data.append(da)

            # Save
            exporter = dataio.find_fittest_converter(fn)
            if exporter.CAN_SAVE_PYRAMID:
                exporter.export(fn, st_data, pyramid=True)
            else:
                logging.warning(
                    "File format doesn't support saving image in pyramidal form"
                )
                exporter.export(fn, st_data)

        ft.set_result(None)  # Indicate it's over

        # End of the (completed) acquisition
        if ft._task_state == CANCELLED:
            raise CancelledError()
        dlg.Close()

        # Open analysis tab
        if st_data:
            popup.show_message(self.main_app.main_frame,
                               "Tiled acquisition complete",
                               "Will display stitched image")
            self.showAcquisition(fn)
        else:
            popup.show_message(self.main_app.main_frame,
                               "Tiled acquisition complete",
                               "Will display last tile")
            # It's easier to know the last filename, and it's also the most
            # interesting for the user, as if something went wrong (eg, focus)
            # it's the tile the most likely to show it.
            self.showAcquisition(fn_tile)

        # TODO: also export a full image (based on reported position, or based
        # on alignment detection)
    except CancelledError:
        logging.debug("Acquisition cancelled")
        dlg.resumeSettings()
    except Exception as ex:
        logging.exception("Acquisition failed.")
        ft.running_subf.cancel()
        ft.set_result(None)
        # Show also in the window. It will be hidden next time a setting is changed.
        self._dlg.setAcquisitionInfo("Acquisition failed: %s" % (ex, ),
                                     lvl=logging.ERROR)
    finally:
        logging.info("Tiled acquisition ended")
        # Always return the stage to where the acquisition started
        main_data.stage.moveAbs(orig_pos)