def on_batch_end(self, state: State):
    """Save the original input images from the batch."""
    names = state.batch_in[self.outpath_key]
    images = state.batch_in[self.input_key]
    images = utils.tensor_to_ndimage(images.detach().cpu(), dtype=np.uint8)
    for image, name in zip(images, names):
        fname = self.get_image_path(state, name, self.filename_suffix)
        imageio.imwrite(fname, image)
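
# `utils.tensor_to_ndimage` converts a batch of CHW float tensors into HWC numpy
# images. A rough sketch of that conversion is below; the real
# catalyst.utils.tensor_to_ndimage may also denormalize with dataset mean/std,
# so treat this as an illustration, not the library code.
import numpy as np
import torch


def tensor_to_ndimage_sketch(images: torch.Tensor, dtype=np.uint8) -> np.ndarray:
    """Convert a (B, C, H, W) float tensor in [0, 1] to a (B, H, W, C) ndarray."""
    arrays = images.permute(0, 2, 3, 1).numpy()  # CHW -> HWC per sample
    if dtype == np.uint8:
        arrays = (arrays * 255).clip(0, 255).round()
    return arrays.astype(dtype)
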
def on_batch_end(self, state: State):
    """Save batch images with the predicted masks blended on top."""
    names = state.batch_in[self.outpath_key]
    images = state.batch_in[self.input_key]
    masks = state.batch_out[self.output_key]
    images = utils.tensor_to_ndimage(images.detach().cpu())
    for name, image, mask in zip(names, images, masks):
        image = mask_to_overlay_image(image, mask, self.mask_strength)
        fname = self.get_image_path(state, name, self.filename_suffix)
        imageio.imwrite(fname, image)
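
# `mask_to_overlay_image` is used above but not shown here. A minimal sketch of
# what such a blend typically does follows; the function name, argument order,
# and exact color handling are assumptions for illustration, not Catalyst's
# actual implementation.
import numpy as np
from skimage.color import label2rgb


def mask_to_overlay_image_sketch(image: np.ndarray, masks, mask_strength: float) -> np.ndarray:
    """Blend per-class masks over a float image in [0, 1] (illustrative only)."""
    # Collapse per-class binary masks into a single integer label map
    label_map = np.zeros(image.shape[:2], dtype=np.int32)
    for i, mask in enumerate(masks):
        label_map[np.asarray(mask) > 0.5] = i + 1
    colored = label2rgb(label_map, bg_label=0)
    # Alpha-blend the colored labels with the original image
    overlay = image * (1 - mask_strength) + colored * mask_strength
    return (overlay * 255).clip(0, 255).round().astype(np.uint8)
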
def on_batch_end(self, runner: IRunner):
    """Save batch of images.

    Args:
        runner (IRunner): current runner
    """
    names = runner.input[self.outpath_key]
    images = runner.input[self.input_key]
    images = utils.tensor_to_ndimage(images.detach().cpu(), dtype=np.uint8)
    for image, name in zip(images, names):
        fname = self._get_image_path(runner.logdir, name)
        imageio.imwrite(fname, image)
def on_batch_end(self, runner: IRunner):
    """Save batch of images with overlay.

    Args:
        runner (IRunner): current runner
    """
    names = runner.input[self.outpath_key]
    images = runner.input[self.input_key]
    masks = runner.output[self.output_key]
    images = utils.tensor_to_ndimage(images.detach().cpu())
    for name, image, mask in zip(names, images, masks):
        image = mask_to_overlay_image(image, mask, self.mask_strength)
        fname = self._get_image_path(runner.logdir, name)
        imageio.imwrite(fname, image)
def on_batch_end(self, runner: IRunner):
    """Batch end hook.

    Args:
        runner (IRunner): current runner
    """
    lm = runner.loader_name
    names = runner.input.get(self.name_key, [])

    features = runner.input[self.input_key].detach().cpu()
    images = utils.tensor_to_ndimage(features)

    logits = runner.output[self.output_key]
    logits = (
        logits.unsqueeze_(dim=1) if len(logits.shape) < 4 else logits
    )

    if self.mask_type == "soft":
        probabilities = torch.sigmoid(logits)
    else:
        probabilities = F.softmax(logits, dim=1)
    probabilities = probabilities.detach().cpu().numpy()

    # collapse per-channel probabilities into an integer label map
    masks = []
    for probability in probabilities:
        mask = np.zeros_like(probability[0], dtype=np.int32)
        for i, ch in enumerate(probability):
            mask[ch >= self.threshold] = i + 1
        masks.append(mask)

    for index, (image, mask) in enumerate(zip(images, masks)):
        try:
            suffix = names[index]
        except IndexError:
            suffix = f"{self.counter:06d}"
            self.counter += 1

        mask = label2rgb(mask, bg_label=0)

        image = (
            image * (1 - self.mask_strength) + mask * self.mask_strength
        )
        image = (image * 255).clip(0, 255).round().astype(np.uint8)

        filename = f"{self.out_prefix}/{lm}/{suffix}.jpg"
        imageio.imwrite(filename, image)
def on_batch_end(self, state: State):
    """Compute per-sample Dice and save images with ground-truth and
    predicted masks drawn on top."""
    names = state.batch_in[self.outpath_key]
    images = state.batch_in[self.input_key]
    gtmasks = state.batch_in[self.input_target_key]
    images = utils.tensor_to_ndimage(images.detach().cpu())
    gtmasks = gtmasks.detach().cpu().squeeze(1).numpy()
    predmasks = state.batch_out[self.output_key]

    for name, image, gtmask, predmask in zip(names, images, gtmasks, predmasks):
        dice = dice_score(gtmask, predmask)
        self.dice_list.append(dice)

        # draw ground truth (red) and prediction (green) over one image channel
        plot_obj = PlotSeg2Image(image[..., 1])
        plot_obj.put_on_mask(gtmask, color='r', is_bbox_on=False)
        plot_obj.put_on_mask(predmask, color='g', is_bbox_on=False)

        fname = self.get_image_path(state, name, '%0.4f' % dice)
        plot_obj.save_fig(fname)
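
# For orientation, a hedged sketch of how image-saving callbacks like the ones
# above are usually attached to a Catalyst run. `model`, `criterion`,
# `optimizer`, `loaders`, `logdir`, `num_epochs`, and the callback constructor
# arguments are placeholders from a typical training setup, not definitions
# from this file, and the exact `train()` signature depends on the Catalyst
# version.
from catalyst.dl import SupervisedRunner

runner = SupervisedRunner()
runner.train(
    model=model,
    criterion=criterion,
    optimizer=optimizer,
    loaders=loaders,
    # the image-saving callbacks defined above go here, e.g.:
    # callbacks=[OriginalImageSaverCallback(output_dir="saved_images")],
    logdir=logdir,
    num_epochs=num_epochs,
    verbose=True,
)
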
    model=model,
    loader=infer_loader,
    resume=f"{logdir}/checkpoints/best.pth",
    verbose=False,
)
print(type(predictions))
print(predictions.shape)


# In[22]:


threshold = 0.5
max_count = 5

for i, (features, logits) in enumerate(zip(test_dataset, predictions)):
    image = utils.tensor_to_ndimage(features["image"])

    mask_ = torch.from_numpy(logits[0]).sigmoid()
    mask = utils.detach(mask_ > threshold).astype("float")

    show_examples(name="", image=image, mask=mask)
    if i >= max_count:
        break


# ## Model tracing
#
# Catalyst lets you use the Runner to [trace](https://pytorch.org/docs/stable/jit.html) models.
#
# > How to do this with the Config API is described in [our blog (issue \#2)](https://github.com/catalyst-team/catalyst-info#catalyst-info-2-tracing-with-torchjit)
#
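
# In[ ]:


# A minimal sketch of tracing with plain torch.jit, assuming `model`,
# `infer_loader`, and `logdir` from the cells above and that the loader yields
# dicts with an "image" key. Catalyst also ships its own tracing helpers, whose
# exact API depends on the version, so this illustrates the idea rather than
# the Runner-based route.
model.eval()
example_input = next(iter(infer_loader))["image"]
traced_model = torch.jit.trace(model, example_input)
torch.jit.save(traced_model, f"{logdir}/traced_model.pth")
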